The typical structure of an Informatics MSc thesis:
Projects are assessed against a number of basic and additional criteria:
Project Introduction:
## packages
import sys
import os
import zipfile as zp
# graph-tool https://git.skewed.de/count0/graph-tool/-/wikis/installation-instructions
if 'graph_tool.all' not in sys.modules:
print('*** install graph-tool')
!conda install -c conda-forge graph-tool -v -y
print('*** finish!!!')
import numpy as np
import pandas as pd
# pd.set_option('display.float_format', lambda x: '%.3f' % x)
import matplotlib as mpl
import matplotlib.pyplot as plt
!pip install --upgrade scipy networkx
import networkx as nx
print("networkx: " + nx.__version__)
import graph_tool.all as gt
print("graph-tool: " + gt.__version__)
*** install graph-tool
/opt/conda/lib/python3.9/site-packages/requests/__init__.py:102: RequestsDependencyWarning: urllib3 (1.26.9) or chardet (5.0.0)/charset_normalizer (2.0.12) doesn't match a supported version!
warnings.warn("urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported "
Collecting package metadata (current_repodata.json): ...working... done
Solving environment: ...working...
The environment is inconsistent, please check the package plan carefully
The following packages are causing the inconsistency:
- conda-forge/noarch::alembic==1.7.7=pyhd8ed1ab_0
- conda-forge/linux-64::anyio==3.5.0=py39hf3d152e_0
- conda-forge/noarch::argon2-cffi==21.3.0=pyhd8ed1ab_0
- conda-forge/linux-64::argon2-cffi-bindings==21.2.0=py39hb9d737c_2
- conda-forge/noarch::asttokens==2.0.5=pyhd8ed1ab_0
- conda-forge/noarch::async_generator==1.10=py_0
- conda-forge/noarch::attrs==21.4.0=pyhd8ed1ab_0
- conda-forge/noarch::babel==2.9.1=pyh44b312d_0
- conda-forge/noarch::backcall==0.2.0=pyh9f0ad1d_0
- conda-forge/noarch::backports==1.0=py_2
- conda-forge/noarch::backports.functools_lru_cache==1.6.4=pyhd8ed1ab_0
- conda-forge/noarch::beautifulsoup4==4.11.1=pyha770c72_0
- conda-forge/noarch::bleach==5.0.0=pyhd8ed1ab_0
- conda-forge/noarch::blinker==1.4=py_1
- conda-forge/linux-64::brotlipy==0.7.0=py39hb9d737c_1004
- conda-forge/noarch::certipy==0.1.3=py_0
- conda-forge/linux-64::cffi==1.15.0=py39h4bc2ebd_0
- conda-forge/noarch::charset-normalizer==2.0.12=pyhd8ed1ab_0
- conda-forge/noarch::colorama==0.4.4=pyh9f0ad1d_0
- conda-forge/linux-64::conda==4.12.0=py39hf3d152e_0
- conda-forge/linux-64::conda-package-handling==1.8.1=py39hb9d737c_1
- conda-forge/linux-64::cryptography==36.0.2=py39hd97740a_1
- conda-forge/linux-64::debugpy==1.6.0=py39h5a03fae_0
- conda-forge/noarch::decorator==5.1.1=pyhd8ed1ab_0
- conda-forge/noarch::defusedxml==0.7.1=pyhd8ed1ab_0
- conda-forge/noarch::entrypoints==0.4=pyhd8ed1ab_0
- conda-forge/noarch::executing==0.8.3=pyhd8ed1ab_0
- conda-forge/noarch::flit-core==3.7.1=pyhd8ed1ab_0
- conda-forge/linux-64::greenlet==1.1.2=py39h5a03fae_2
- conda-forge/noarch::idna==3.3=pyhd8ed1ab_0
- conda-forge/linux-64::importlib-metadata==4.11.3=py39hf3d152e_1
- conda-forge/noarch::importlib_metadata==4.11.3=hd8ed1ab_1
- conda-forge/noarch::importlib_resources==5.7.1=pyhd8ed1ab_0
- conda-forge/linux-64::ipykernel==6.13.0=py39hef51801_0
- conda-forge/linux-64::ipython==8.3.0=py39hf3d152e_0
- conda-forge/noarch::ipython_genutils==0.2.0=py_1
- conda-forge/linux-64::jedi==0.18.1=py39hf3d152e_1
- conda-forge/noarch::jinja2==3.1.1=pyhd8ed1ab_0
- conda-forge/noarch::json5==0.9.5=pyh9f0ad1d_0
- conda-forge/noarch::jsonschema==4.4.0=pyhd8ed1ab_0
- conda-forge/noarch::jupyter_client==7.3.0=pyhd8ed1ab_0
- conda-forge/linux-64::jupyter_core==4.9.2=py39hf3d152e_0
- conda-forge/noarch::jupyter_server==1.17.0=pyhd8ed1ab_0
- conda-forge/noarch::jupyter_telemetry==0.1.0=pyhd8ed1ab_1
- conda-forge/noarch::jupyterhub==2.2.2=pyhd8ed1ab_1
- conda-forge/noarch::jupyterhub-base==2.2.2=pyhd8ed1ab_1
- conda-forge/noarch::jupyterlab==3.3.4=pyhd8ed1ab_0
- conda-forge/noarch::jupyterlab_pygments==0.2.2=pyhd8ed1ab_0
- conda-forge/noarch::jupyterlab_server==2.13.0=pyhd8ed1ab_1
- conda-forge/linux-64::libmambapy==0.23.0=py39hd55135b_1
- conda-forge/noarch::mako==1.2.0=pyhd8ed1ab_1
- conda-forge/linux-64::mamba==0.23.0=py39hfa8f2c8_1
- conda-forge/linux-64::markupsafe==2.1.1=py39hb9d737c_1
- conda-forge/noarch::matplotlib-inline==0.1.3=pyhd8ed1ab_0
- conda-forge/linux-64::mistune==0.8.4=py39h3811e60_1005
- conda-forge/noarch::nbclassic==0.3.7=pyhd8ed1ab_0
- conda-forge/noarch::nbconvert==6.5.0=pyhd8ed1ab_0
- conda-forge/noarch::nbconvert-core==6.5.0=pyhd8ed1ab_0
- conda-forge/noarch::nbconvert-pandoc==6.5.0=pyhd8ed1ab_0
- conda-forge/noarch::nbformat==5.3.0=pyhd8ed1ab_0
- conda-forge/noarch::nest-asyncio==1.5.5=pyhd8ed1ab_0
- conda-forge/noarch::notebook==6.4.11=pyha770c72_0
- conda-forge/noarch::notebook-shim==0.1.0=pyhd8ed1ab_0
- conda-forge/noarch::oauthlib==3.2.0=pyhd8ed1ab_0
- conda-forge/noarch::packaging==21.3=pyhd8ed1ab_0
- conda-forge/noarch::pamela==1.0.0=py_0
- conda-forge/noarch::pandocfilters==1.5.0=pyhd8ed1ab_0
- conda-forge/noarch::parso==0.8.3=pyhd8ed1ab_0
- conda-forge/noarch::pexpect==4.8.0=pyh9f0ad1d_2
- conda-forge/noarch::pickleshare==0.7.5=py_1003
- conda-forge/noarch::pip==22.0.4=pyhd8ed1ab_0
- conda-forge/noarch::prompt-toolkit==3.0.29=pyha770c72_0
- conda-forge/linux-64::psutil==5.9.0=py39hb9d737c_1
- conda-forge/noarch::ptyprocess==0.7.0=pyhd3deb0d_0
- conda-forge/noarch::pure_eval==0.2.2=pyhd8ed1ab_0
- conda-forge/linux-64::pycosat==0.6.3=py39hb9d737c_1010
- conda-forge/noarch::pycparser==2.21=pyhd8ed1ab_0
- conda-forge/linux-64::pycurl==7.45.1=py39hd73adbb_1
- conda-forge/noarch::pygments==2.12.0=pyhd8ed1ab_0
- conda-forge/noarch::pyopenssl==22.0.0=pyhd8ed1ab_0
- conda-forge/noarch::pyparsing==3.0.8=pyhd8ed1ab_0
- conda-forge/linux-64::pyrsistent==0.18.1=py39hb9d737c_1
- conda-forge/linux-64::pysocks==1.7.1=py39hf3d152e_5
- conda-forge/linux-64::python==3.9.12=h9a8a25e_1_cpython
- conda-forge/noarch::python-dateutil==2.8.2=pyhd8ed1ab_0
- conda-forge/noarch::python-fastjsonschema==2.15.3=pyhd8ed1ab_0
- conda-forge/noarch::python-json-logger==2.0.1=pyh9f0ad1d_0
- conda-forge/linux-64::python_abi==3.9=2_cp39
- conda-forge/noarch::pytz==2022.1=pyhd8ed1ab_0
- conda-forge/linux-64::pyzmq==22.3.0=py39headdf64_2
- conda-forge/linux-64::readline==8.1=h46c0cb4_0
- conda-forge/noarch::requests==2.27.1=pyhd8ed1ab_0
- conda-forge/linux-64::ruamel.yaml==0.17.21=py39hb9d737c_1
- conda-forge/linux-64::ruamel.yaml.clib==0.2.6=py39hb9d737c_1
- conda-forge/linux-64::ruamel_yaml==0.15.80=py39h3811e60_1006
- conda-forge/noarch::send2trash==1.8.0=pyhd8ed1ab_0
- conda-forge/linux-64::setuptools==62.1.0=py39hf3d152e_0
- conda-forge/noarch::six==1.16.0=pyh6c4a22f_0
- conda-forge/linux-64::sniffio==1.2.0=py39hf3d152e_3
- conda-forge/noarch::soupsieve==2.3.1=pyhd8ed1ab_0
- conda-forge/linux-64::sqlalchemy==1.4.36=py39hb9d737c_0
- conda-forge/linux-64::sqlite==3.38.3=h4ff8645_0
- conda-forge/noarch::stack_data==0.2.0=pyhd8ed1ab_0
- conda-forge/linux-64::terminado==0.13.3=py39hf3d152e_1
- conda-forge/noarch::tinycss2==1.1.1=pyhd8ed1ab_0
- conda-forge/linux-64::tornado==6.1=py39hb9d737c_3
- conda-forge/noarch::tqdm==4.64.0=pyhd8ed1ab_0
- conda-forge/noarch::traitlets==5.1.1=pyhd8ed1ab_0
- conda-forge/noarch::urllib3==1.26.9=pyhd8ed1ab_0
- conda-forge/noarch::wcwidth==0.2.5=pyh9f0ad1d_2
- conda-forge/noarch::webencodings==0.5.1=py_1
- conda-forge/noarch::websocket-client==1.3.2=pyhd8ed1ab_0
- conda-forge/noarch::wheel==0.37.1=pyhd8ed1ab_0
- conda-forge/noarch::zipp==3.8.0=pyhd8ed1ab_0
- conda-forge/linux-64::abseil-cpp==20211102.0=h93e1e8c_3
- conda-forge/noarch::absl-py==1.3.0=pyhd8ed1ab_0
- conda-forge/noarch::altair==4.2.0=pyhd8ed1ab_1
- conda-forge/noarch::appdirs==1.4.4=pyh9f0ad1d_0
- conda-forge/noarch::arviz==0.13.0=pyhd8ed1ab_0
- conda-forge/noarch::bokeh==2.4.3=pyhd8ed1ab_3
- conda-forge/noarch::cached-property==1.5.2=hd8ed1ab_1
- conda-forge/noarch::cached_property==1.5.2=pyha770c72_1
- conda-forge/noarch::cachetools==5.2.0=pyhd8ed1ab_0
- conda-forge/noarch::certifi==2022.9.24=pyhd8ed1ab_0
- conda-forge/linux-64::cftime==1.6.2=py39h2ae25f5_1
- conda-forge/linux-64::chardet==5.0.0=py39hf3d152e_1
- conda-forge/noarch::click==8.1.3=unix_pyhd8ed1ab_2
- conda-forge/noarch::cloudpickle==2.1.0=pyhd8ed1ab_0
- conda-forge/noarch::colorcet==3.0.1=pyhd8ed1ab_0
- conda-forge/linux-64::conda-build==3.21.8=py39hf3d152e_1
- conda-forge/linux-64::cvxpy==1.2.1=py39hf3d152e_0
- conda-forge/linux-64::cvxpy-base==1.2.1=py39h1832856_0
- conda-forge/noarch::cycler==0.11.0=pyhd8ed1ab_0
- conda-forge/linux-64::cython==0.29.30=py39h5a03fae_0
- conda-forge/linux-64::cytoolz==0.12.0=py39hb9d737c_1
- conda-forge/noarch::dask==2022.5.0=pyhd8ed1ab_0
- conda-forge/noarch::dask-core==2022.5.0=pyhd8ed1ab_0
- conda-forge/noarch::dask-glm==0.2.0=py_1
- conda-forge/noarch::dask-ml==2022.1.22=pyhd8ed1ab_0
- conda-forge/noarch::demes==0.2.2=pyhd8ed1ab_0
- conda-forge/noarch::demesdraw==0.3.0=pyhd8ed1ab_0
- conda-forge/noarch::deprecat==2.1.1=pyhd8ed1ab_0
- conda-forge/noarch::dill==0.3.5.1=pyhd8ed1ab_0
- conda-forge/noarch::distributed==2022.5.0=pyhd8ed1ab_0
- conda-forge/noarch::dnspython==2.2.1=pyhd8ed1ab_0
- conda-forge/linux-64::ecos==2.0.10=py39h2ae25f5_2
- conda-forge/noarch::elementpath==2.5.3=pyhd8ed1ab_0
- conda-forge/noarch::et_xmlfile==1.0.1=py_1001
- conda-forge/noarch::etils==0.9.0=pyhd8ed1ab_0
- conda-forge/noarch::fastprogress==1.0.3=pyhd8ed1ab_0
- conda-forge/noarch::filelock==3.8.0=pyhd8ed1ab_0
- conda-forge/linux-64::fonttools==4.38.0=py39hb9d737c_1
- conda-forge/noarch::fsspec==2022.10.0=pyhd8ed1ab_0
- conda-forge/noarch::gitdb==4.0.9=pyhd8ed1ab_0
- conda-forge/noarch::gitdb2==4.0.2=py_0
- conda-forge/noarch::gitpython==3.1.27=pyhd8ed1ab_0
- conda-forge/linux-64::glib==2.70.2=h780b84a_4
- conda-forge/noarch::glob2==0.7=py_0
- conda-forge/linux-64::gmpy2==2.1.2=py39h376b7d2_1
- conda-forge/linux-64::grpc-cpp==1.45.2=h38f6961_0
- conda-forge/linux-64::gst-plugins-base==1.20.2=hcf0ee16_0
- conda-forge/linux-64::gstreamer==1.20.3=hd4edc92_0
- conda-forge/linux-64::h5py==3.6.0=nompi_py39h7e08c79_100
- conda-forge/noarch::heapdict==1.0.1=py_0
- conda-forge/noarch::holoviews==1.15.1=pyhd8ed1ab_0
- conda-forge/noarch::hvplot==0.8.0=pyh6c4a22f_0
- conda-forge/linux-64::imagecodecs==2022.2.22=py39h9c0c3a3_5
- conda-forge/noarch::imageio==2.22.0=pyhfa7a67d_0
- conda-forge/noarch::ipympl==0.9.1=pyhd8ed1ab_0
- conda-forge/noarch::ipywidgets==7.7.0=pyhd8ed1ab_0
- conda-forge/noarch::jax==0.3.14=pyhd8ed1ab_1
- conda-forge/linux-64::jaxlib==0.3.10=cpu_py39h320bf12_0
- conda-forge/noarch::joblib==1.2.0=pyhd8ed1ab_0
- conda-forge/linux-64::jupyter==1.0.0=py39hf3d152e_7
- conda-forge/noarch::jupyter-server-mathjax==0.2.6=pyhc268e32_0
- conda-forge/noarch::jupyter_console==6.4.4=pyhd8ed1ab_0
- conda-forge/noarch::jupyter_contrib_core==0.4.0=pyhd8ed1ab_0
- conda-forge/noarch::jupyter_contrib_nbextensions==0.5.1=pyhd8ed1ab_2
- conda-forge/linux-64::jupyter_highlight_selected_word==0.2.0=py39hf3d152e_1005
- conda-forge/noarch::jupyter_latex_envs==1.4.6=pyhd8ed1ab_1002
- conda-forge/noarch::jupyter_nbextensions_configurator==0.4.1=pyhd8ed1ab_2
- conda-forge/noarch::jupyterlab_widgets==1.1.1=pyhd8ed1ab_0
- conda-forge/noarch::k3d==2.13.1=pyhd8ed1ab_0
- conda-forge/linux-64::kiwisolver==1.4.4=py39hf939315_1
- conda-forge/linux-64::libabseil==20211102.0=cxx17_h48a1fff_3
- conda-forge/noarch::pyct==0.4.6=py_0
- conda-forge/linux-64::libopencv==4.5.5=py39hb0e02d1_7
- conda-forge/linux-64::libpq==14.3=hd77ab85_0
- conda-forge/noarch::libpysal==4.6.2=pyhd8ed1ab_0
- conda-forge/linux-64::llvmlite==0.38.1=py39h7d9a04d_0
- conda-forge/noarch::locket==1.0.0=pyhd8ed1ab_0
- conda-forge/linux-64::lxml==4.8.0=py39hb9d737c_2
- conda-forge/linux-64::lz4==4.0.2=py39h029007f_0
- conda-forge/noarch::markdown==3.4.1=pyhd8ed1ab_0
- conda-forge/linux-64::matplotlib-base==3.5.2=py39h700656a_1
- conda-forge/linux-64::mkl-service==2.4.0=py39hb699420_0
- conda-forge/noarch::mock==4.0.3=pyhd8ed1ab_4
- conda-forge/noarch::mpld3==0.5.7=pyhd8ed1ab_0
- conda-forge/noarch::mpmath==1.2.1=pyhd8ed1ab_0
- conda-forge/linux-64::msgpack-python==1.0.4=py39hf939315_1
- conda-forge/noarch::multipledispatch==0.6.0=py_0
- conda-forge/noarch::munkres==1.1.4=pyh9f0ad1d_0
- conda-forge/linux-64::mysql-connector-python==8.0.29=py39h8d08e9c_0
- conda-forge/noarch::nbdime==3.1.1=pyhd8ed1ab_0
- conda-forge/linux-64::netcdf4==1.5.8=nompi_py39h64b754b_101
- conda-forge/noarch::networkx==2.8.7=pyhd8ed1ab_0
- conda-forge/noarch::nltk==3.6.7=pyhd8ed1ab_0
- conda-forge/noarch::nose==1.3.7=py_1006
- conda-forge/linux-64::nss==3.77=h2350873_0
- conda-forge/linux-64::numba==0.55.1=py39h66db6d7_1
- conda-forge/linux-64::numexpr==2.7.3=py39hde0f152_1
- conda-forge/linux-64::numpy==1.21.6=py39h18676bf_0
- conda-forge/linux-64::opencv==4.5.5=py39hf3d152e_7
- conda-forge/noarch::openpyxl==3.0.9=pyhd8ed1ab_0
- conda-forge/noarch::opt_einsum==3.3.0=pyhd8ed1ab_1
- conda-forge/linux-64::osqp==0.6.2.post0=py39hde0f152_3
- conda-forge/noarch::palettable==3.3.0=py_0
- conda-forge/linux-64::pandana==0.6.1=py39hde0f152_1
- conda-forge/linux-64::pandas==1.4.2=py39h1832856_2
- conda-forge/noarch::panel==0.14.0=pyhd8ed1ab_0
- conda-forge/noarch::param==1.12.2=pyh6c4a22f_0
- conda-forge/noarch::partd==1.3.0=pyhd8ed1ab_0
- conda-forge/noarch::patsy==0.5.2=pyhd8ed1ab_0
- conda-forge/linux-64::pillow==9.1.1=py39hae2aec6_0
- conda-forge/noarch::pkginfo==1.8.3=pyhd8ed1ab_0
- conda-forge/noarch::plotly==5.8.0=pyhd8ed1ab_1
- conda-forge/noarch::pointpats==2.2.0=pyhd8ed1ab_1
- conda-forge/noarch::polyline==1.4.0=py_0
- conda-forge/noarch::prompt_toolkit==3.0.29=hd8ed1ab_0
- conda-forge/linux-64::protobuf==3.19.4=py39he80948d_0
- conda-forge/linux-64::py-lief==0.12.2=py39h5a03fae_1
- conda-forge/linux-64::py-opencv==4.5.5=py39hef51801_7
- conda-forge/noarch::pyct-core==0.4.6=py_0
- conda-forge/linux-64::pygpu==0.7.6=py39hce5d2b2_1003
- conda-forge/linux-64::pykrige==1.6.1=py39h3811e60_1
- conda-forge/linux-64::pymc3==3.11.5=py39ha563b64_0
- conda-forge/linux-64::pyqt==5.12.3=py39hf3d152e_8
- conda-forge/linux-64::pyqt-impl==5.12.3=py39hde8b62d_8
- conda-forge/linux-64::pyqt5-sip==4.19.18=py39he80948d_8
- conda-forge/linux-64::pyqtchart==5.12=py39h0fcd23e_8
- conda-forge/linux-64::pyqtwebengine==5.12.1=py39h0fcd23e_8
- conda-forge/linux-64::pytables==3.6.1=py39h2669a42_5
- conda-forge/noarch::python-flatbuffers==2.0=pyhd8ed1ab_0
- conda-forge/noarch::python-graphviz==0.20=pyhaef67bd_0
- conda-forge/linux-64::python-libarchive-c==4.0=py39hf3d152e_2
- conda-forge/noarch::python-tzdata==2022.5=pyhd8ed1ab_0
- conda-forge/linux-64::pytz-deprecation-shim==0.1.0.post0=py39hf3d152e_3
- conda-forge/noarch::pyviz_comms==2.2.1=pyhd8ed1ab_1
- conda-forge/linux-64::pywavelets==1.3.0=py39h2ae25f5_2
- conda-forge/noarch::pyxlsb==1.0.9=pyhd8ed1ab_0
- conda-forge/linux-64::pyyaml==6.0=py39hb9d737c_5
- conda-forge/linux-64::qdldl-python==0.1.5.post2=py39h4661b88_0
- conda-forge/linux-64::qt==5.12.9=h1304e3e_6
- conda-forge/noarch::qtconsole==5.3.2=pyhd8ed1ab_0
- conda-forge/noarch::qtconsole-base==5.3.2=pyha770c72_0
- conda-forge/noarch::qtpy==2.2.1=pyhd8ed1ab_0
- conda-forge/linux-64::regex==2022.10.31=py39hb9d737c_0
- conda-forge/linux-64::rise==5.7.1=py39hf3d152e_1
- conda-forge/linux-64::scikit-image==0.19.2=py39hde0f152_0
- conda-forge/linux-64::scikit-learn==1.1.1=py39h4037b75_0
- conda-forge/linux-64::scipy==1.7.3=py39hee8e79c_0
- conda-forge/linux-64::scs==3.2.0=py39hbcdb44f_2
- conda-forge/noarch::seaborn==0.11.2=hd8ed1ab_0
- conda-forge/noarch::seaborn-base==0.11.2=pyhd8ed1ab_0
- conda-forge/noarch::semver==2.13.0=pyh9f0ad1d_0
- conda-forge/linux-64::shapely==1.8.5=py39h5b5020f_1
- conda-forge/noarch::smmap==3.0.5=pyh44b312d_0
- conda-forge/noarch::sortedcontainers==2.4.0=pyhd8ed1ab_0
- conda-forge/noarch::spectral==0.22.4=pyh6c4a22f_0
- conda-forge/linux-64::statsmodels==0.13.2=py39hd257fcd_0
- conda-forge/linux-64::sympy==1.10.1=py39hf3d152e_1
- conda-forge/noarch::tblib==1.7.0=pyhd8ed1ab_0
- conda-forge/noarch::tenacity==8.1.0=pyhd8ed1ab_0
- conda-forge/noarch::terminaltables==3.1.10=pyhd8ed1ab_0
- conda-forge/noarch::textblob==0.15.3=py_0
- conda-forge/linux-64::theano-pymc==1.1.2=py39he80948d_0
- conda-forge/noarch::threadpoolctl==3.1.0=pyh8a188c0_0
- conda-forge/noarch::tifffile==2022.10.10=pyhd8ed1ab_0
- conda-forge/noarch::toolz==0.12.0=pyhd8ed1ab_0
- conda-forge/noarch::traittypes==0.2.1=pyh9f0ad1d_2
- conda-forge/noarch::typing-extensions==4.4.0=hd8ed1ab_0
- conda-forge/noarch::typing_extensions==4.4.0=pyha770c72_0
- conda-forge/linux-64::tzlocal==4.2=py39hf3d152e_2
- conda-forge/linux-64::unicodedata2==15.0.0=py39hb9d737c_0
- conda-forge/linux-64::vega==3.6.0=py39hf3d152e_0
- conda-forge/noarch::vega_datasets==0.9.0=pyhd3deb0d_0
- conda-forge/noarch::vincent==0.4.4=py_1
- conda-forge/noarch::widgetsnbextension==3.6.0=pyha770c72_1
- conda-forge/linux-64::wordcloud==1.8.1=py39h3811e60_2
- conda-forge/linux-64::wrapt==1.14.1=py39hb9d737c_1
- conda-forge/noarch::xarray==2022.10.0=pyhd8ed1ab_0
- conda-forge/noarch::xarray-einstats==0.3.0=pyhd8ed1ab_0
- conda-forge/noarch::xlrd==2.0.1=pyhd8ed1ab_3
- conda-forge/noarch::xlsxwriter==3.0.3=pyhd8ed1ab_0
- conda-forge/noarch::xlwt==1.3.0=py_1
- conda-forge/noarch::xmlschema==1.11.1=pyhd8ed1ab_0
- conda-forge/noarch::zict==2.2.0=pyhd8ed1ab_0
failed with initial frozen solve. Retrying with flexible solve.
Solving environment: ...working... failed with repodata from current_repodata.json, will retry with next repodata source.
Collecting package metadata (repodata.json): ...working... done
Solving environment: ...working...
The environment is inconsistent, please check the package plan carefully
The following packages are causing the inconsistency:
- conda-forge/noarch::jupyter_server==1.17.0=pyhd8ed1ab_0
- conda-forge/noarch::jupyterhub==2.2.2=pyhd8ed1ab_1
- conda-forge/noarch::jupyterhub-base==2.2.2=pyhd8ed1ab_1
- conda-forge/noarch::jupyterlab==3.3.4=pyhd8ed1ab_0
- conda-forge/noarch::jupyterlab_server==2.13.0=pyhd8ed1ab_1
- conda-forge/noarch::nbclassic==0.3.7=pyhd8ed1ab_0
- conda-forge/noarch::nbconvert==6.5.0=pyhd8ed1ab_0
- conda-forge/noarch::nbconvert-core==6.5.0=pyhd8ed1ab_0
- conda-forge/noarch::nbconvert-pandoc==6.5.0=pyhd8ed1ab_0
- conda-forge/noarch::notebook==6.4.11=pyha770c72_0
- conda-forge/noarch::notebook-shim==0.1.0=pyhd8ed1ab_0
- conda-forge/noarch::oauthlib==3.2.0=pyhd8ed1ab_0
- conda-forge/linux-64::abseil-cpp==20211102.0=h93e1e8c_3
- conda-forge/linux-64::chardet==5.0.0=py39hf3d152e_1
- conda-forge/linux-64::conda-build==3.21.8=py39hf3d152e_1
- conda-forge/linux-64::grpc-cpp==1.45.2=h38f6961_0
- conda-forge/noarch::ipympl==0.9.1=pyhd8ed1ab_0
- conda-forge/noarch::ipywidgets==7.7.0=pyhd8ed1ab_0
- conda-forge/noarch::jax==0.3.14=pyhd8ed1ab_1
- conda-forge/linux-64::jaxlib==0.3.10=cpu_py39h320bf12_0
- conda-forge/linux-64::jupyter==1.0.0=py39hf3d152e_7
- conda-forge/noarch::jupyter-server-mathjax==0.2.6=pyhc268e32_0
- conda-forge/noarch::jupyter_contrib_core==0.4.0=pyhd8ed1ab_0
- conda-forge/noarch::jupyter_contrib_nbextensions==0.5.1=pyhd8ed1ab_2
- conda-forge/linux-64::jupyter_highlight_selected_word==0.2.0=py39hf3d152e_1005
- conda-forge/noarch::jupyter_latex_envs==1.4.6=pyhd8ed1ab_1002
- conda-forge/noarch::jupyter_nbextensions_configurator==0.4.1=pyhd8ed1ab_2
- conda-forge/noarch::k3d==2.13.1=pyhd8ed1ab_0
- conda-forge/linux-64::libabseil==20211102.0=cxx17_h48a1fff_3
- conda-forge/noarch::nbdime==3.1.1=pyhd8ed1ab_0
- conda-forge/linux-64::pymc3==3.11.5=py39ha563b64_0
- conda-forge/linux-64::rise==5.7.1=py39hf3d152e_1
- conda-forge/linux-64::theano-pymc==1.1.2=py39he80948d_0
- conda-forge/linux-64::vega==3.6.0=py39hf3d152e_0
- conda-forge/noarch::widgetsnbextension==3.6.0=pyha770c72_1
done
==> WARNING: A newer version of conda exists. <==
current version: 4.12.0
latest version: 23.7.2
Please update conda by running
$ conda update -n base conda
INFO conda.core.link:__init__(168): initializing UnlinkLinkTransaction with
target_prefix: /opt/conda
unlink_precs:
conda-forge/noarch::requests-2.27.1-pyhd8ed1ab_0
conda-forge/noarch::certifi-2022.9.24-pyhd8ed1ab_0
conda-forge/linux-64::abseil-cpp-20211102.0-h93e1e8c_3
conda-forge/linux-64::openssl-1.1.1s-h166bdaf_0
conda-forge/linux-64::libabseil-20211102.0-cxx17_h48a1fff_3
conda-forge/linux-64::ca-certificates-2022.9.24-ha878542_0
link_precs:
conda-forge/linux-64::ca-certificates-2023.7.22-hbcca054_0
conda-forge/linux-64::hicolor-icon-theme-0.17-ha770c72_2
conda-forge/linux-64::epoxy-1.5.10-h166bdaf_1
conda-forge/linux-64::libabseil-20211102.0-cxx17_h48a1fff_2
conda-forge/linux-64::openssl-1.1.1v-hd590300_0
conda-forge/linux-64::sigcpp-2.0-2.10.8-h27087fc_0
conda-forge/linux-64::sparsehash-2.0.4-hcb278e6_1
conda-forge/linux-64::xorg-recordproto-1.14.2-h7f98852_1002
conda-forge/linux-64::abseil-cpp-20211102.0-h93e1e8c_2
conda-forge/linux-64::boost-cpp-1.74.0-h6cacc03_7
conda-forge/linux-64::libcups-2.3.3-hf5a7f15_1
conda-forge/noarch::certifi-2023.7.22-pyhd8ed1ab_0
conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0
conda-forge/noarch::pyjwt-2.8.0-pyhd8ed1ab_0
conda-forge/linux-64::xorg-libxtst-1.2.3-h7f98852_1002
conda-forge/linux-64::at-spi2-core-2.40.3-h0630a04_0
conda-forge/linux-64::boost-1.74.0-py39h5472131_5
conda-forge/linux-64::cairomm-1.0-1.12.2-h96a316c_4
conda-forge/linux-64::libgirepository-1.72.0-h26ff761_1
conda-forge/linux-64::pycairo-1.24.0-py39hc92de75_0
conda-forge/linux-64::zstandard-0.19.0-py39h6e5214e_2
conda-forge/linux-64::at-spi2-atk-2.38.0-h0630a04_3
conda-forge/linux-64::cairomm-1.12.2-ha770c72_4
conda-forge/linux-64::graph-tool-base-2.45-py39h2fa1a2d_2
conda-forge/linux-64::pygobject-3.42.1-py39habf54e5_0
conda-forge/linux-64::gtk3-3.24.33-h13ada96_1
conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0
conda-forge/linux-64::graph-tool-2.45-py39h7b6895e_2
conda-forge/noarch::requests-2.31.0-pyhd8ed1ab_0
## Package Plan ##
environment location: /opt/conda
added / updated specs:
- graph-tool
The following packages will be downloaded:
package | build
---------------------------|-----------------
abseil-cpp-20211102.0 | h93e1e8c_2 13 KB conda-forge
at-spi2-atk-2.38.0 | h0630a04_3 332 KB conda-forge
at-spi2-core-2.40.3 | h0630a04_0 643 KB conda-forge
boost-1.74.0 | py39h5472131_5 357 KB conda-forge
boost-cpp-1.74.0 | h6cacc03_7 16.3 MB conda-forge
ca-certificates-2023.7.22 | hbcca054_0 146 KB conda-forge
cairomm-1.12.2 | ha770c72_4 11 KB conda-forge
cairomm-1.0-1.12.2 | h96a316c_4 687 KB conda-forge
certifi-2023.7.22 | pyhd8ed1ab_0 150 KB conda-forge
epoxy-1.5.10 | h166bdaf_1 1.4 MB conda-forge
graph-tool-2.45 | py39h7b6895e_2 27 KB conda-forge
graph-tool-base-2.45 | py39h2fa1a2d_2 68.1 MB conda-forge
gtk3-3.24.33 | h13ada96_1 9.9 MB conda-forge
hicolor-icon-theme-0.17 | ha770c72_2 14 KB conda-forge
libabseil-20211102.0 | cxx17_h48a1fff_2 1.1 MB conda-forge
libcups-2.3.3 | hf5a7f15_1 4.6 MB conda-forge
libgirepository-1.72.0 | h26ff761_1 311 KB conda-forge
nbclient-0.6.4 | pyhd8ed1ab_0 65 KB conda-forge
openssl-1.1.1v | hd590300_0 1.9 MB conda-forge
prometheus_client-0.17.1 | pyhd8ed1ab_0 52 KB conda-forge
pycairo-1.24.0 | py39hc92de75_0 111 KB conda-forge
pygobject-3.42.1 | py39habf54e5_0 290 KB conda-forge
pyjwt-2.8.0 | pyhd8ed1ab_0 24 KB conda-forge
requests-2.31.0 | pyhd8ed1ab_0 55 KB conda-forge
sigcpp-2.0-2.10.8 | h27087fc_0 158 KB conda-forge
sparsehash-2.0.4 | hcb278e6_1 84 KB conda-forge
xorg-libxtst-1.2.3 | h7f98852_1002 31 KB conda-forge
xorg-recordproto-1.14.2 | h7f98852_1002 8 KB conda-forge
zstandard-0.19.0 | py39h6e5214e_2 384 KB conda-forge
------------------------------------------------------------
Total: 107.0 MB
The following NEW packages will be INSTALLED:
at-spi2-atk conda-forge/linux-64::at-spi2-atk-2.38.0-h0630a04_3
at-spi2-core conda-forge/linux-64::at-spi2-core-2.40.3-h0630a04_0
boost conda-forge/linux-64::boost-1.74.0-py39h5472131_5
boost-cpp conda-forge/linux-64::boost-cpp-1.74.0-h6cacc03_7
cairomm conda-forge/linux-64::cairomm-1.12.2-ha770c72_4
cairomm-1.0 conda-forge/linux-64::cairomm-1.0-1.12.2-h96a316c_4
epoxy conda-forge/linux-64::epoxy-1.5.10-h166bdaf_1
graph-tool conda-forge/linux-64::graph-tool-2.45-py39h7b6895e_2
graph-tool-base conda-forge/linux-64::graph-tool-base-2.45-py39h2fa1a2d_2
gtk3 conda-forge/linux-64::gtk3-3.24.33-h13ada96_1
hicolor-icon-theme conda-forge/linux-64::hicolor-icon-theme-0.17-ha770c72_2
libcups conda-forge/linux-64::libcups-2.3.3-hf5a7f15_1
libgirepository conda-forge/linux-64::libgirepository-1.72.0-h26ff761_1
nbclient conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0
prometheus_client conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0
pycairo conda-forge/linux-64::pycairo-1.24.0-py39hc92de75_0
pygobject conda-forge/linux-64::pygobject-3.42.1-py39habf54e5_0
pyjwt conda-forge/noarch::pyjwt-2.8.0-pyhd8ed1ab_0
sigcpp-2.0 conda-forge/linux-64::sigcpp-2.0-2.10.8-h27087fc_0
sparsehash conda-forge/linux-64::sparsehash-2.0.4-hcb278e6_1
xorg-libxtst conda-forge/linux-64::xorg-libxtst-1.2.3-h7f98852_1002
xorg-recordproto conda-forge/linux-64::xorg-recordproto-1.14.2-h7f98852_1002
zstandard conda-forge/linux-64::zstandard-0.19.0-py39h6e5214e_2
The following packages will be UPDATED:
ca-certificates 2022.9.24-ha878542_0 --> 2023.7.22-hbcca054_0
certifi 2022.9.24-pyhd8ed1ab_0 --> 2023.7.22-pyhd8ed1ab_0
openssl 1.1.1s-h166bdaf_0 --> 1.1.1v-hd590300_0
requests 2.27.1-pyhd8ed1ab_0 --> 2.31.0-pyhd8ed1ab_0
The following packages will be DOWNGRADED:
abseil-cpp 20211102.0-h93e1e8c_3 --> 20211102.0-h93e1e8c_2
libabseil 20211102.0-cxx17_h48a1fff_3 --> 20211102.0-cxx17_h48a1fff_2
Preparing transaction: ...working... done
Verifying transaction: ...working... INFO conda.core.link:verify(245): [ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/__init__.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/bridge/__init__.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/bridge/graphite.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/context_managers.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/core.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/decorator.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/exposition.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/gc_collector.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/metrics.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/metrics_core.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/mmap_dict.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/multiprocess.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/openmetrics/__init__.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/openmetrics/exposition.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/openmetrics/parser.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/parser.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/platform_collector.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/process_collector.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/registry.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/samples.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/twisted/__init__.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/twisted/_exposition.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/utils.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/values.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/__pycache__/__init__.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/bridge/__pycache__/__init__.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/bridge/__pycache__/graphite.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/__pycache__/context_managers.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/__pycache__/core.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/__pycache__/decorator.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/__pycache__/exposition.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/__pycache__/gc_collector.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/__pycache__/metrics.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/__pycache__/metrics_core.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/__pycache__/mmap_dict.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/__pycache__/multiprocess.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/openmetrics/__pycache__/__init__.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/openmetrics/__pycache__/exposition.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/openmetrics/__pycache__/parser.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/__pycache__/parser.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/__pycache__/platform_collector.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/__pycache__/process_collector.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/__pycache__/registry.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/__pycache__/samples.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/twisted/__pycache__/__init__.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/twisted/__pycache__/_exposition.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/__pycache__/utils.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::prometheus_client-0.17.1-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/prometheus_client/__pycache__/values.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::pyjwt-2.8.0-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/jwt/__init__.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::pyjwt-2.8.0-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/jwt/algorithms.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::pyjwt-2.8.0-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/jwt/api_jws.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::pyjwt-2.8.0-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/jwt/api_jwt.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::pyjwt-2.8.0-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/jwt/exceptions.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::pyjwt-2.8.0-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/jwt/help.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::pyjwt-2.8.0-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/jwt/utils.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::pyjwt-2.8.0-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/jwt/__pycache__/__init__.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::pyjwt-2.8.0-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/jwt/__pycache__/algorithms.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::pyjwt-2.8.0-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/jwt/__pycache__/api_jws.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::pyjwt-2.8.0-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/jwt/__pycache__/api_jwt.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::pyjwt-2.8.0-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/jwt/__pycache__/exceptions.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::pyjwt-2.8.0-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/jwt/__pycache__/help.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::pyjwt-2.8.0-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/jwt/__pycache__/utils.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/linux-64::gtk3-3.24.33-h13ada96_1' cannot be installed due to a
path collision for 'bin/gtk-update-icon-cache'.
This path already exists in the target prefix, and it won't be removed by
an uninstall action in this transaction. The path appears to be coming from
the package 'conda-forge/linux-64::gtk2-2.24.33-h90689f9_2', which is already installed in the prefix.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'bin/jupyter-execute'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/__init__.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/_version.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/cli.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/client.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/exceptions.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/jsonutil.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/output_widget.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/py.typed'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/__init__.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/base.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/conftest.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/fake_kernelmanager.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/files/Autokill.ipynb'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/files/Check History in Memory.ipynb'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/files/Clear Output.ipynb'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/files/Disable Stdin.ipynb'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/files/Empty Cell.ipynb'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/files/Error.ipynb'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/files/Factorials.ipynb'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/files/HelloWorld.ipynb'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/files/Inline Image.ipynb'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/files/Interrupt.ipynb'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/files/JupyterWidgets.ipynb'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/files/Other Comms.ipynb'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/files/Output.ipynb'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/files/Parallel Execute A.ipynb'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/files/Parallel Execute B.ipynb'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/files/SVG.ipynb'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/files/Skip Exceptions with Cell Tags.ipynb'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/files/Skip Exceptions.ipynb'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/files/Skip Execution with Cell Tag.ipynb'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/files/Sleep1s.ipynb'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/files/Unicode.ipynb'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/files/UnicodePy3.ipynb'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/files/python.png'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/files/update-display-id.ipynb'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/test_client.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/test_util.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/util.py'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/__pycache__/__init__.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/__pycache__/_version.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/__pycache__/cli.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/__pycache__/client.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/__pycache__/exceptions.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/__pycache__/jsonutil.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/__pycache__/output_widget.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/__pycache__/__init__.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/__pycache__/base.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/__pycache__/conftest.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/__pycache__/fake_kernelmanager.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/__pycache__/test_client.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/tests/__pycache__/test_util.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
, ClobberError: The package 'conda-forge/noarch::nbclient-0.6.4-pyhd8ed1ab_0' cannot be installed due to a
path collision for 'lib/python3.9/site-packages/nbclient/__pycache__/util.cpython-39.pyc'.
This path already exists in the target prefix, and it won't be removed
by an uninstall action in this transaction. The path is one that conda
doesn't recognize. It may have been created by another package manager.
]
done
Executing transaction: ...working... INFO conda.core.link:_execute_actions(766): ===> UNLINKING PACKAGE: conda-forge/noarch::requests-2.27.1-pyhd8ed1ab_0 <===
prefix=/opt/conda
INFO conda.core.link:_execute_actions(766): ===> UNLINKING PACKAGE: conda-forge/noarch::certifi-2022.9.24-pyhd8ed1ab_0 <===
prefix=/opt/conda
INFO conda.core.link:_execute_actions(766): ===> UNLINKING PACKAGE: conda-forge/linux-64::abseil-cpp-20211102.0-h93e1e8c_3 <===
prefix=/opt/conda
INFO conda.core.link:_execute_actions(766): ===> UNLINKING PACKAGE: conda-forge/linux-64::openssl-1.1.1s-h166bdaf_0 <===
prefix=/opt/conda
INFO conda.core.link:_execute_actions(766): ===> UNLINKING PACKAGE: conda-forge/linux-64::libabseil-20211102.0-cxx17_h48a1fff_3 <===
prefix=/opt/conda
INFO conda.core.link:_execute_actions(766): ===> UNLINKING PACKAGE: conda-forge/linux-64::ca-certificates-2022.9.24-ha878542_0 <===
prefix=/opt/conda
INFO conda.core.link:_execute_actions(771): ===> LINKING PACKAGE: conda-forge::ca-certificates-2023.7.22-hbcca054_0 <===
prefix=/opt/conda
source=/opt/conda/pkgs/ca-certificates-2023.7.22-hbcca054_0
INFO conda.core.link:_execute_actions(771): ===> LINKING PACKAGE: conda-forge::hicolor-icon-theme-0.17-ha770c72_2 <===
prefix=/opt/conda
source=/opt/conda/pkgs/hicolor-icon-theme-0.17-ha770c72_2
INFO conda.core.link:_execute_actions(771): ===> LINKING PACKAGE: conda-forge::epoxy-1.5.10-h166bdaf_1 <===
prefix=/opt/conda
source=/opt/conda/pkgs/epoxy-1.5.10-h166bdaf_1
INFO conda.core.link:_execute_actions(771): ===> LINKING PACKAGE: conda-forge::libabseil-20211102.0-cxx17_h48a1fff_2 <===
prefix=/opt/conda
source=/opt/conda/pkgs/libabseil-20211102.0-cxx17_h48a1fff_2
INFO conda.core.link:_execute_actions(771): ===> LINKING PACKAGE: conda-forge::openssl-1.1.1v-hd590300_0 <===
prefix=/opt/conda
source=/opt/conda/pkgs/openssl-1.1.1v-hd590300_0
INFO conda.core.link:_execute_actions(771): ===> LINKING PACKAGE: conda-forge::sigcpp-2.0-2.10.8-h27087fc_0 <===
prefix=/opt/conda
source=/opt/conda/pkgs/sigcpp-2.0-2.10.8-h27087fc_0
INFO conda.core.link:_execute_actions(771): ===> LINKING PACKAGE: conda-forge::sparsehash-2.0.4-hcb278e6_1 <===
prefix=/opt/conda
source=/opt/conda/pkgs/sparsehash-2.0.4-hcb278e6_1
INFO conda.core.link:_execute_actions(771): ===> LINKING PACKAGE: conda-forge::xorg-recordproto-1.14.2-h7f98852_1002 <===
prefix=/opt/conda
source=/opt/conda/pkgs/xorg-recordproto-1.14.2-h7f98852_1002
INFO conda.core.link:_execute_actions(771): ===> LINKING PACKAGE: conda-forge::abseil-cpp-20211102.0-h93e1e8c_2 <===
prefix=/opt/conda
source=/opt/conda/pkgs/abseil-cpp-20211102.0-h93e1e8c_2
INFO conda.core.link:_execute_actions(771): ===> LINKING PACKAGE: conda-forge::boost-cpp-1.74.0-h6cacc03_7 <===
prefix=/opt/conda
source=/opt/conda/pkgs/boost-cpp-1.74.0-h6cacc03_7
INFO conda.core.link:_execute_actions(771): ===> LINKING PACKAGE: conda-forge::libcups-2.3.3-hf5a7f15_1 <===
prefix=/opt/conda
source=/opt/conda/pkgs/libcups-2.3.3-hf5a7f15_1
INFO conda.core.link:_execute_actions(771): ===> LINKING PACKAGE: conda-forge::certifi-2023.7.22-pyhd8ed1ab_0 <===
prefix=/opt/conda
source=/opt/conda/pkgs/certifi-2023.7.22-pyhd8ed1ab_0
INFO conda.core.link:_execute_actions(771): ===> LINKING PACKAGE: conda-forge::prometheus_client-0.17.1-pyhd8ed1ab_0 <===
prefix=/opt/conda
source=/opt/conda/pkgs/prometheus_client-0.17.1-pyhd8ed1ab_0
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/prometheus_client/__init__.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/prometheus_client/bridge/__init__.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/prometheus_client/bridge/graphite.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/prometheus_client/context_managers.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/prometheus_client/core.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/prometheus_client/decorator.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/prometheus_client/exposition.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/prometheus_client/gc_collector.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/prometheus_client/metrics.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/prometheus_client/metrics_core.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/prometheus_client/mmap_dict.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/prometheus_client/multiprocess.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/prometheus_client/openmetrics/__init__.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/prometheus_client/openmetrics/exposition.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/prometheus_client/openmetrics/parser.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/prometheus_client/parser.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/prometheus_client/platform_collector.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/prometheus_client/process_collector.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/prometheus_client/registry.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/prometheus_client/samples.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/prometheus_client/twisted/__init__.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/prometheus_client/twisted/_exposition.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/prometheus_client/utils.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/prometheus_client/values.py'
INFO conda.core.link:_execute_actions(771): ===> LINKING PACKAGE: conda-forge::pyjwt-2.8.0-pyhd8ed1ab_0 <===
prefix=/opt/conda
source=/opt/conda/pkgs/pyjwt-2.8.0-pyhd8ed1ab_0
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/jwt/__init__.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/jwt/algorithms.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/jwt/api_jws.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/jwt/api_jwt.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/jwt/exceptions.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/jwt/help.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/jwt/utils.py'
INFO conda.core.link:_execute_actions(771): ===> LINKING PACKAGE: conda-forge::xorg-libxtst-1.2.3-h7f98852_1002 <===
prefix=/opt/conda
source=/opt/conda/pkgs/xorg-libxtst-1.2.3-h7f98852_1002
INFO conda.core.link:_execute_actions(771): ===> LINKING PACKAGE: conda-forge::at-spi2-core-2.40.3-h0630a04_0 <===
prefix=/opt/conda
source=/opt/conda/pkgs/at-spi2-core-2.40.3-h0630a04_0
INFO conda.core.link:_execute_actions(771): ===> LINKING PACKAGE: conda-forge::boost-1.74.0-py39h5472131_5 <===
prefix=/opt/conda
source=/opt/conda/pkgs/boost-1.74.0-py39h5472131_5
INFO conda.core.link:_execute_actions(771): ===> LINKING PACKAGE: conda-forge::cairomm-1.0-1.12.2-h96a316c_4 <===
prefix=/opt/conda
source=/opt/conda/pkgs/cairomm-1.0-1.12.2-h96a316c_4
INFO conda.core.link:_execute_actions(771): ===> LINKING PACKAGE: conda-forge::libgirepository-1.72.0-h26ff761_1 <===
prefix=/opt/conda
source=/opt/conda/pkgs/libgirepository-1.72.0-h26ff761_1
INFO conda.core.link:_execute_actions(771): ===> LINKING PACKAGE: conda-forge::pycairo-1.24.0-py39hc92de75_0 <===
prefix=/opt/conda
source=/opt/conda/pkgs/pycairo-1.24.0-py39hc92de75_0
INFO conda.core.link:_execute_actions(771): ===> LINKING PACKAGE: conda-forge::zstandard-0.19.0-py39h6e5214e_2 <===
prefix=/opt/conda
source=/opt/conda/pkgs/zstandard-0.19.0-py39h6e5214e_2
INFO conda.core.link:_execute_actions(771): ===> LINKING PACKAGE: conda-forge::at-spi2-atk-2.38.0-h0630a04_3 <===
prefix=/opt/conda
source=/opt/conda/pkgs/at-spi2-atk-2.38.0-h0630a04_3
INFO conda.core.link:_execute_actions(771): ===> LINKING PACKAGE: conda-forge::cairomm-1.12.2-ha770c72_4 <===
prefix=/opt/conda
source=/opt/conda/pkgs/cairomm-1.12.2-ha770c72_4
INFO conda.core.link:_execute_actions(771): ===> LINKING PACKAGE: conda-forge::graph-tool-base-2.45-py39h2fa1a2d_2 <===
prefix=/opt/conda
source=/opt/conda/pkgs/graph-tool-base-2.45-py39h2fa1a2d_2
INFO conda.core.link:_execute_actions(771): ===> LINKING PACKAGE: conda-forge::pygobject-3.42.1-py39habf54e5_0 <===
prefix=/opt/conda
source=/opt/conda/pkgs/pygobject-3.42.1-py39habf54e5_0
INFO conda.core.link:_execute_actions(771): ===> LINKING PACKAGE: conda-forge::gtk3-3.24.33-h13ada96_1 <===
prefix=/opt/conda
source=/opt/conda/pkgs/gtk3-3.24.33-h13ada96_1
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/bin/gtk-update-icon-cache'
INFO conda.core.link:_execute_actions(771): ===> LINKING PACKAGE: conda-forge::nbclient-0.6.4-pyhd8ed1ab_0 <===
prefix=/opt/conda
source=/opt/conda/pkgs/nbclient-0.6.4-pyhd8ed1ab_0
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/bin/jupyter-execute'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/__init__.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/_version.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/cli.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/client.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/exceptions.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/jsonutil.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/output_widget.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/py.typed'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/tests/__init__.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/tests/base.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/tests/conftest.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/tests/fake_kernelmanager.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/tests/files/Autokill.ipynb'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/tests/files/Check History in Memory.ipynb'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/tests/files/Clear Output.ipynb'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/tests/files/Disable Stdin.ipynb'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/tests/files/Empty Cell.ipynb'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/tests/files/Error.ipynb'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/tests/files/Factorials.ipynb'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/tests/files/HelloWorld.ipynb'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/tests/files/Inline Image.ipynb'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/tests/files/Interrupt.ipynb'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/tests/files/JupyterWidgets.ipynb'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/tests/files/Other Comms.ipynb'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/tests/files/Output.ipynb'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/tests/files/Parallel Execute A.ipynb'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/tests/files/Parallel Execute B.ipynb'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/tests/files/SVG.ipynb'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/tests/files/Skip Exceptions with Cell Tags.ipynb'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/tests/files/Skip Exceptions.ipynb'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/tests/files/Skip Execution with Cell Tag.ipynb'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/tests/files/Sleep1s.ipynb'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/tests/files/Unicode.ipynb'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/tests/files/UnicodePy3.ipynb'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/tests/files/python.png'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/tests/files/update-display-id.ipynb'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/tests/test_client.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/tests/test_util.py'
INFO conda.gateways.disk.create:create_link(353): file exists, but clobbering: '/opt/conda/lib/python3.9/site-packages/nbclient/util.py'
INFO conda.core.link:_execute_actions(771): ===> LINKING PACKAGE: conda-forge::graph-tool-2.45-py39h7b6895e_2 <===
prefix=/opt/conda
source=/opt/conda/pkgs/graph-tool-2.45-py39h7b6895e_2
INFO conda.core.link:_execute_actions(771): ===> LINKING PACKAGE: conda-forge::requests-2.31.0-pyhd8ed1ab_0 <===
prefix=/opt/conda
source=/opt/conda/pkgs/requests-2.31.0-pyhd8ed1ab_0
done
*** finish!!!
Requirement already satisfied: scipy in /opt/conda/lib/python3.9/site-packages (1.7.3)
Collecting scipy
Using cached scipy-1.11.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (36.5 MB)
Requirement already satisfied: networkx in /opt/conda/lib/python3.9/site-packages (2.8.7)
Collecting networkx
Using cached networkx-3.1-py3-none-any.whl (2.1 MB)
Requirement already satisfied: numpy<1.28.0,>=1.21.6 in /opt/conda/lib/python3.9/site-packages (from scipy) (1.21.6)
Installing collected packages: scipy, networkx
Attempting uninstall: scipy
Found existing installation: scipy 1.7.3
Uninstalling scipy-1.7.3:
Successfully uninstalled scipy-1.7.3
Attempting uninstall: networkx
Found existing installation: networkx 2.8.7
Uninstalling networkx-2.8.7:
Successfully uninstalled networkx-2.8.7
ERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.
pointpats 2.2.0 requires opencv-contrib-python>=4.2.0, which is not installed.
pymc3 3.11.5 requires scipy<1.8.0,>=1.7.3, but you have scipy 1.11.2 which is incompatible.
Successfully installed networkx-3.1 scipy-1.11.2
networkx: 3.1
graph-tool: 2.45 (commit 71a3b6d2, )
# Build a small directed toy graph in graph-tool: six named vertices
# (A-F) and seven weighted edges, then draw it with vertex sizes scaled
# by in-degree and edges labelled with their weights.
g_gt = gt.Graph(directed=True)

# One vertex per label, kept in a dict so the edge spec below can refer
# to endpoints by name.
labels = ["A", "B", "C", "D", "E", "F"]
vertices = {lbl: g_gt.add_vertex() for lbl in labels}

# Attach the labels as a string vertex property.
g_gt.vp.name = g_gt.new_vertex_property("string")
for lbl, v in vertices.items():
    g_gt.vp.name[v] = lbl

# Weighted edges as (source, target, weight) triples, added in the same
# order as the original cell so edge indices match.
edge_spec = [("B", "A", 10), ("C", "A", 20), ("B", "C", 30),
             ("E", "D", 10), ("F", "D", 20), ("E", "F", 30),
             ("A", "D", 100)]
g_gt.ep.weight = g_gt.new_edge_property("double")
for src, dst, w in edge_spec:
    edge = g_gt.add_edge(vertices[src], vertices[dst])
    g_gt.ep.weight[edge] = w

# Scale each vertex's drawn size by 30x its in-degree so hubs stand out.
deg = g_gt.degree_property_map('in')
deg.a = 30 * deg.a
gt.graph_draw(g_gt,
              vertex_text=g_gt.properties[('v', 'name')], vertex_size=deg,
              edge_text=g_gt.properties[('e', 'weight')])
<VertexPropertyMap object with value type 'vector<double>', for Graph 0x7fc85dfc9700, at 0x7fc85dfce520>
# The same toy graph rebuilt in NetworkX as a weighted DiGraph, followed
# by a tour of its basic descriptive statistics.
g_nx = nx.DiGraph()
weighted_edges = [('B', 'A', 10), ('C', 'A', 20), ('B', 'C', 30),
                  ('E', 'D', 10), ('F', 'D', 20), ('E', 'F', 30),
                  ('A', 'D', 100)]
g_nx.add_weighted_edges_from(weighted_edges)
nx.draw(g_nx, with_labels=True)

# Global structure.
print(nx.is_directed(g_nx))
print(nx.is_weighted(g_nx))
print(nx.number_of_nodes(g_nx))
print(nx.number_of_edges(g_nx))
print(nx.density(g_nx))

# Degrees of node 'A': first unweighted (weight=None is the default),
# then weighted — same in/out/total triple each time.
for wt in (None, 'weight'):
    print(g_nx.in_degree('A', weight=wt))
    print(g_nx.out_degree('A', weight=wt))
    print(g_nx.degree('A', weight=wt))

# Connectivity of the directed graph, then the mean weighted shortest
# path on its undirected view.
print(nx.is_weakly_connected(g_nx))
print(nx.is_strongly_connected(g_nx))
print(nx.average_shortest_path_length(g_nx.to_undirected(), weight='weight'))
def nx_diameter_with_weight(G, weight_str):
    """Weighted diameter: the largest weighted eccentricity over all nodes.

    For each source node, run Dijkstra to every reachable node and take the
    longest of those shortest-path distances; the diameter is the maximum
    of these values over all sources.
    """
    eccentricities = (
        max(nx.single_source_dijkstra(G, source=node, weight=weight_str)[0].values())
        for node in G.nodes()
    )
    return max(eccentricities)
# Weighted diameter on the undirected view, and clustering coefficient of 'A'.
undirected_view = g_nx.to_undirected()
print(nx_diameter_with_weight(undirected_view, 'weight'))
print(nx.clustering(g_nx, 'A'))
True True 6 7 0.23333333333333334 2 1 3 30 100 130 True False 80.0 140 0.16666666666666666
# SBMs: fit a flat stochastic block model to the football network and
# visualize the between-group edge-count matrix.
g_gt = gt.collection.data["football"]
state = gt.minimize_blockmodel_dl(g_gt)
state.draw(pos=g_gt.vp.pos)

# Relabel blocks contiguously so the block matrix has no empty rows/columns.
contiguous_blocks = gt.contiguous_map(state.get_blocks())
state = state.copy(b=contiguous_blocks)

# Edge counts between groups, restricted to the non-empty groups.
edge_counts = state.get_matrix()
n_groups = state.get_nonempty_B()
plt.matshow(edge_counts.todense()[:n_groups, :n_groups], cmap='viridis')
plt.xlabel('Group r')
plt.ylabel('Group s')
plt.colorbar()
plt.show()
## data
# Unzip the FNA network archive on first run, then list its CSV snapshots.
zip_path = 'FNA network.zip'
# NOTE: 'dir' shadows the builtin dir(); kept as-is because later cells use it.
dir = 'FNA network'
if not os.path.exists(dir):
    # Fixed: the message previously referred to 'data.zip', not the real archive.
    print('*** unzip ' + zip_path)
    with zp.ZipFile(zip_path) as arc:
        arc.extractall()
file_list = os.listdir(dir)
file_list.sort()
print(len(file_list))
file_list
27
['20150331.csv', '20150630.csv', '20150930.csv', '20151231.csv', '20160331.csv', '20160630.csv', '20160930.csv', '20161231.csv', '20170331.csv', '20170630.csv', '20170930.csv', '20171231.csv', '20180331.csv', '20180630.csv', '20180930.csv', '20181231.csv', '20190331.csv', '20190630.csv', '20190930.csv', '20191231.csv', '20200331.csv', '20200630.csv', '20200930.csv', '20201231.csv', '20210331.csv', '20210630.csv', '20210930.csv']
# Snapshot timestamps (YYYYMMDD strings) derived from the CSV file names.
time_list = [file_name.replace('.csv', '') for file_name in file_list]
# pd_inspect: load the first quarterly snapshot to learn the schema
# (shape, column names, and a few sample rows).
pd_inspect = pd.read_csv(dir + '/' + file_list[0])
print(pd_inspect.shape)
print(pd_inspect.columns)
pd_inspect.head()
(406, 21)
Index(['net_id', 'arc_id', 'from_id', 'to_id', 'borrower', 'claims_held',
'claims_held_10_change', 'claims_held_10_change_max',
'claims_held_10_change_min', 'claims_held_2_change',
'claims_held_2_change_max', 'claims_held_2_change_min',
'claims_held_5_change', 'claims_held_5_change_max',
'claims_held_5_change_min', 'claims_held_max', 'claims_held_min',
'lender', 'share_of_claims_held', 'share_of_claims_held_max',
'share_of_claims_held_min'],
dtype='object')
| net_id | arc_id | from_id | to_id | borrower | claims_held | claims_held_10_change | claims_held_10_change_max | claims_held_10_change_min | claims_held_2_change | ... | claims_held_2_change_min | claims_held_5_change | claims_held_5_change_max | claims_held_5_change_min | claims_held_max | claims_held_min | lender | share_of_claims_held | share_of_claims_held_max | share_of_claims_held_min | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 2015/3/31 | AT-AU-0 | AT | AU | Austria | 5.220850e+08 | NaN | NaN | NaN | 63385000.0 | ... | -1.244000e+09 | -1.586615e+09 | 6.050000e+08 | -2.894000e+09 | 3.436000e+09 | 4.450000e+08 | Australia | 0.005037 | 0.014129 | 0.003330 |
| 1 | 2015/3/31 | AT-BE-0 | AT | BE | Austria | 1.750940e+09 | NaN | NaN | NaN | 87716000.0 | ... | -3.459000e+09 | -1.177284e+09 | 1.641000e+09 | -3.771637e+09 | 6.063000e+09 | 1.550000e+09 | Belgium | 0.016893 | 0.029052 | 0.011835 |
| 2 | 2015/3/31 | AT-CA-0 | AT | CA | Austria | 8.363850e+08 | NaN | NaN | NaN | -122939000.0 | ... | -6.610000e+08 | -8.139390e+08 | 8.910000e+08 | -1.297000e+09 | 2.204000e+09 | 7.510000e+08 | Canada | 0.008069 | 0.010148 | 0.005185 |
| 3 | 2015/3/31 | AT-CH-0 | AT | CH | Austria | 6.733410e+09 | NaN | NaN | NaN | 551087000.0 | ... | -6.296000e+09 | -6.938913e+09 | 6.476000e+09 | -6.387000e+09 | 1.312600e+10 | 5.288694e+09 | Switzerland | 0.064963 | 0.081206 | 0.037446 |
| 4 | 2015/3/31 | AT-CL-0 | AT | CL | Austria | 1.416100e+07 | NaN | NaN | NaN | -7430000.0 | ... | -1.780000e+08 | -4.543000e+07 | 3.300000e+07 | -1.740000e+08 | 2.390000e+08 | 7.000000e+06 | Chile | 0.000137 | 0.001536 | 0.000051 |
5 rows × 21 columns
# Sanity-check the node sets: unique borrowers, lenders, and arc ids.
for col in ('borrower', 'lender'):
    uniques = pd_inspect[col].unique()
    print(f"{col}: len={len(uniques)}, array={np.array2string(uniques)}")
print(f"arc_id: len={len(pd_inspect['arc_id'].unique())}")
borrower: len=21, array=['Austria' 'Australia' 'Belgium' 'Canada' 'Switzerland' 'Chile' 'Germany' 'Spain' 'Finland' 'France' 'United_Kingdom' 'Greece' 'Ireland' 'Italy' 'Japan' 'South_Korea' 'Netherlands' 'Portugal' 'Sweden' 'Turkey' 'United_States'] lender: len=22, array=['Australia' 'Belgium' 'Canada' 'Switzerland' 'Chile' 'Germany' 'Spain' 'Finland' 'France' 'United_Kingdom' 'Greece' 'Ireland' 'India' 'Italy' 'Japan' 'South_Korea' 'Netherlands' 'Portugal' 'Sweden' 'Turkey' 'United_States' 'Austria'] arc_id: len=406
# pd_list: we only consider claims_held/USD
# One DataFrame per quarter, restricted to the arc-identity columns + claims_held.
columns_key = ['arc_id', 'from_id', 'to_id', 'borrower', 'lender']
pd_claims_held_dict = {
    one_time: pd.read_csv(dir + '/' + one_time + '.csv')[columns_key + ['claims_held']]
    for one_time in time_list
}
print(pd_claims_held_dict[time_list[0]].shape)
pd_claims_held_dict[time_list[0]].head()
(406, 6)
| arc_id | from_id | to_id | borrower | lender | claims_held | |
|---|---|---|---|---|---|---|
| 0 | AT-AU-0 | AT | AU | Austria | Australia | 5.220850e+08 |
| 1 | AT-BE-0 | AT | BE | Austria | Belgium | 1.750940e+09 |
| 2 | AT-CA-0 | AT | CA | Austria | Canada | 8.363850e+08 |
| 3 | AT-CH-0 | AT | CH | Austria | Switzerland | 6.733410e+09 |
| 4 | AT-CL-0 | AT | CL | Austria | Chile | 1.416100e+07 |
# pd_merge: outer-join all quarterly snapshots on the arc-identity columns,
# renaming each 'claims_held' column to its snapshot date. The outer join
# keeps arcs that appear in only some quarters (rows grow from 406 to 417).
pd_claims_held_merge = pd_claims_held_dict[time_list[0]].rename(columns={'claims_held': time_list[0]})
for one_time in time_list[1:]:
    pd_claims_held_merge = pd_claims_held_merge.merge(
        pd_claims_held_dict[one_time].rename(columns={'claims_held': one_time}),
        how='outer', on=columns_key)
print(pd_claims_held_merge.shape)
pd_claims_held_merge.head()
(417, 32)
| arc_id | from_id | to_id | borrower | lender | 20150331 | 20150630 | 20150930 | 20151231 | 20160331 | ... | 20190630 | 20190930 | 20191231 | 20200331 | 20200630 | 20200930 | 20201231 | 20210331 | 20210630 | 20210930 | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | AT-AU-0 | AT | AU | Austria | Australia | 5.220850e+08 | 5.084990e+08 | 4.856030e+08 | 4.644340e+08 | 5.175790e+08 | ... | 7.276380e+08 | 7.059820e+08 | 6.804890e+08 | 6.906940e+08 | 8.673920e+08 | 7.464550e+08 | 7.066430e+08 | 5.567450e+08 | 5.362950e+08 | 4.686770e+08 |
| 1 | AT-BE-0 | AT | BE | Austria | Belgium | 1.750940e+09 | 1.849885e+09 | 1.628480e+09 | 1.658547e+09 | 1.913879e+09 | ... | 1.779613e+09 | 2.025959e+09 | 2.038951e+09 | 2.471685e+09 | 2.676054e+09 | 2.576757e+09 | 2.456031e+09 | 2.479068e+09 | 2.659388e+09 | 2.712196e+09 |
| 2 | AT-CA-0 | AT | CA | Austria | Canada | 8.363850e+08 | 7.752660e+08 | 8.305000e+08 | 7.912120e+08 | 1.103028e+09 | ... | 1.371020e+09 | 1.261644e+09 | 1.325207e+09 | 1.526138e+09 | 1.733183e+09 | 1.750037e+09 | 1.841479e+09 | 1.791729e+09 | 2.011624e+09 | 1.983479e+09 |
| 3 | AT-CH-0 | AT | CH | Austria | Switzerland | 6.733410e+09 | 6.627628e+09 | 6.614905e+09 | 5.406037e+09 | 6.069540e+09 | ... | 1.341007e+10 | 1.141622e+10 | 8.002445e+09 | 1.069121e+10 | 9.524585e+09 | 9.707074e+09 | 9.208257e+09 | 9.387236e+09 | 8.534151e+09 | 8.151934e+09 |
| 4 | AT-CL-0 | AT | CL | Austria | Chile | 1.416100e+07 | 1.679800e+07 | 1.669500e+07 | 2.351900e+07 | 2.146300e+07 | ... | 2.170100e+07 | 2.030500e+07 | 7.084000e+07 | 7.724400e+07 | 3.058500e+07 | 1.125200e+08 | 4.499800e+07 | 4.861500e+07 | 8.117600e+07 | 7.893600e+07 |
5 rows × 32 columns
# Re-check the node sets after the outer merge (arc count can grow).
for col in ('borrower', 'lender'):
    uniques = pd_claims_held_merge[col].unique()
    print(f"{col}: len={len(uniques)}, array={np.array2string(uniques)}")
print(f"arc_id: len={len(pd_claims_held_merge['arc_id'].unique())}")
borrower: len=21, array=['Austria' 'Australia' 'Belgium' 'Canada' 'Switzerland' 'Chile' 'Germany' 'Spain' 'Finland' 'France' 'United_Kingdom' 'Greece' 'Ireland' 'Italy' 'Japan' 'South_Korea' 'Netherlands' 'Portugal' 'Sweden' 'Turkey' 'United_States'] lender: len=22, array=['Australia' 'Belgium' 'Canada' 'Switzerland' 'Chile' 'Germany' 'Spain' 'Finland' 'France' 'United_Kingdom' 'Greece' 'Ireland' 'India' 'Italy' 'Japan' 'South_Korea' 'Netherlands' 'Portugal' 'Sweden' 'Turkey' 'United_States' 'Austria'] arc_id: len=417
# Summary statistics of claims_held for every quarterly snapshot column.
pd_claims_held_merge.describe()
| 20150331 | 20150630 | 20150930 | 20151231 | 20160331 | 20160630 | 20160930 | 20161231 | 20170331 | 20170630 | ... | 20190630 | 20190930 | 20191231 | 20200331 | 20200630 | 20200930 | 20201231 | 20210331 | 20210630 | 20210930 | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| count | 4.060000e+02 | 4.040000e+02 | 4.020000e+02 | 4.000000e+02 | 4.030000e+02 | 4.030000e+02 | 4.000000e+02 | 4.020000e+02 | 3.990000e+02 | 4.000000e+02 | ... | 4.040000e+02 | 4.080000e+02 | 4.060000e+02 | 4.090000e+02 | 4.080000e+02 | 4.060000e+02 | 4.080000e+02 | 4.050000e+02 | 4.070000e+02 | 4.070000e+02 |
| mean | 3.896289e+10 | 3.901076e+10 | 3.891243e+10 | 3.747723e+10 | 3.899392e+10 | 3.907801e+10 | 3.890906e+10 | 3.724084e+10 | 3.936975e+10 | 4.033251e+10 | ... | 4.290681e+10 | 4.239769e+10 | 4.212174e+10 | 4.559877e+10 | 4.526763e+10 | 4.642467e+10 | 4.743614e+10 | 4.938093e+10 | 4.875622e+10 | 4.882246e+10 |
| std | 1.150953e+11 | 1.145297e+11 | 1.137052e+11 | 1.122800e+11 | 1.170230e+11 | 1.190124e+11 | 1.191671e+11 | 1.171589e+11 | 1.217479e+11 | 1.220391e+11 | ... | 1.347591e+11 | 1.366200e+11 | 1.376339e+11 | 1.541051e+11 | 1.467481e+11 | 1.483425e+11 | 1.516806e+11 | 1.610586e+11 | 1.576152e+11 | 1.614671e+11 |
| min | 3.400000e+04 | 3.500000e+04 | 1.700000e+04 | 1.600000e+04 | 2.400000e+04 | 3.000000e+03 | 1.900000e+04 | 2.000000e+03 | 2.000000e+03 | 1.600000e+04 | ... | 3.200000e+04 | 1.000000e+03 | 6.000000e+03 | 5.000000e+03 | 2.000000e+03 | 2.500000e+04 | 2.000000e+03 | 2.000000e+03 | 8.000000e+03 | 4.000000e+03 |
| 25% | 4.332500e+08 | 4.781960e+08 | 5.032530e+08 | 5.098255e+08 | 4.971165e+08 | 4.590000e+08 | 5.229000e+08 | 4.096500e+08 | 4.568110e+08 | 4.260468e+08 | ... | 4.666802e+08 | 4.887218e+08 | 6.085820e+08 | 4.748340e+08 | 4.935000e+08 | 5.412948e+08 | 5.079855e+08 | 5.163610e+08 | 6.380995e+08 | 5.060000e+08 |
| 50% | 3.905123e+09 | 4.170236e+09 | 4.025781e+09 | 4.016555e+09 | 4.500000e+09 | 4.102000e+09 | 4.315762e+09 | 3.635000e+09 | 4.241088e+09 | 4.421241e+09 | ... | 5.037840e+09 | 5.039014e+09 | 5.142062e+09 | 5.083272e+09 | 4.759000e+09 | 5.074405e+09 | 4.720122e+09 | 4.948706e+09 | 5.030000e+09 | 4.564000e+09 |
| 75% | 2.505605e+10 | 2.512682e+10 | 2.536450e+10 | 2.596236e+10 | 2.553878e+10 | 2.469195e+10 | 2.536008e+10 | 2.487500e+10 | 2.704789e+10 | 2.822338e+10 | ... | 2.934108e+10 | 2.797351e+10 | 2.761575e+10 | 2.877100e+10 | 2.916425e+10 | 3.123221e+10 | 3.055900e+10 | 3.444372e+10 | 3.307950e+10 | 3.294450e+10 |
| max | 1.402600e+12 | 1.367110e+12 | 1.419200e+12 | 1.480200e+12 | 1.562440e+12 | 1.574080e+12 | 1.632530e+12 | 1.621990e+12 | 1.664630e+12 | 1.602110e+12 | ... | 1.720280e+12 | 1.782080e+12 | 1.814070e+12 | 2.083960e+12 | 1.929570e+12 | 1.928240e+12 | 1.970800e+12 | 2.139220e+12 | 2.004560e+12 | 2.062660e+12 |
8 rows × 27 columns
# we mainly consider mean, 50% and max in a time line
describe_focus = ['mean', '50%', 'max']
pd_claims_held_merge_describe = pd_claims_held_merge.describe().T
fig, axs = plt.subplots(len(describe_focus), figsize=(7, 6))
# One subplot per statistic; zip pairs each axis with its statistic.
for ax, stat in zip(axs, describe_focus):
    ax.plot(pd_claims_held_merge_describe[stat], label=stat)
    ax.set_title(stat)
    ax.xaxis.set_tick_params(rotation=45)
    ax.legend()
plt.tight_layout()
plt.show()
# Cache each quarterly snapshot as a GraphML file (generated only once).
graphml_dir = 'FNA graphml'
if not os.path.exists(graphml_dir):
    print('*** generate graphml')
    os.makedirs(graphml_dir)
    for snapshot in time_list:
        # NOTE: rescales the cached DataFrame in place (USD -> billions of USD).
        pd_claims_held_dict[snapshot]['claims_held'] /= 10**9
        graph = nx.from_pandas_edgelist(
            pd_claims_held_dict[snapshot],
            source='lender', target='borrower',
            edge_attr='claims_held', create_using=nx.DiGraph())
        # Add reciprocal weights so larger loans act as shorter distances.
        for u, v, w in graph.edges.data('claims_held'):
            graph.edges[u, v]['claims_held_reciprocal'] = 1 / w
        nx.write_graphml_lxml(graph, graphml_dir + "/" + snapshot + ".graphml")
# Load every snapshot back as a networkx digraph, keyed by timestamp.
print("*** read graphml for nx")
G_nxs = {}
for snapshot in time_list:
    graph = nx.read_graphml(graphml_dir + "/" + snapshot + ".graphml")
    print(f"nx time: {snapshot}, nodes: {graph.number_of_nodes()}, edges: {graph.number_of_edges()}")
    G_nxs[snapshot] = graph
# Load the same snapshots as graph-tool graphs, keyed by timestamp.
print("*** read graphml for gt")
G_gts = {}
for snapshot in time_list:
    graph = gt.load_graph(graphml_dir + "/" + snapshot + ".graphml")
    print(f"gt time: {snapshot}, nodes: {graph.num_vertices()}, edges: {graph.num_edges()}")
    G_gts[snapshot] = graph
*** read graphml for nx nx time: 20150331, nodes: 22, edges: 406 nx time: 20150630, nodes: 22, edges: 404 nx time: 20150930, nodes: 22, edges: 402 nx time: 20151231, nodes: 22, edges: 400 nx time: 20160331, nodes: 22, edges: 403 nx time: 20160630, nodes: 22, edges: 403 nx time: 20160930, nodes: 22, edges: 400 nx time: 20161231, nodes: 22, edges: 402 nx time: 20170331, nodes: 22, edges: 399 nx time: 20170630, nodes: 22, edges: 400 nx time: 20170930, nodes: 22, edges: 401 nx time: 20171231, nodes: 22, edges: 402 nx time: 20180331, nodes: 22, edges: 401 nx time: 20180630, nodes: 22, edges: 403 nx time: 20180930, nodes: 22, edges: 405 nx time: 20181231, nodes: 22, edges: 402 nx time: 20190331, nodes: 22, edges: 407 nx time: 20190630, nodes: 22, edges: 404 nx time: 20190930, nodes: 22, edges: 408 nx time: 20191231, nodes: 22, edges: 406 nx time: 20200331, nodes: 22, edges: 409 nx time: 20200630, nodes: 22, edges: 408 nx time: 20200930, nodes: 22, edges: 406 nx time: 20201231, nodes: 22, edges: 408 nx time: 20210331, nodes: 22, edges: 405 nx time: 20210630, nodes: 22, edges: 407 nx time: 20210930, nodes: 22, edges: 407 *** read graphml for gt gt time: 20150331, nodes: 22, edges: 406 gt time: 20150630, nodes: 22, edges: 404 gt time: 20150930, nodes: 22, edges: 402 gt time: 20151231, nodes: 22, edges: 400 gt time: 20160331, nodes: 22, edges: 403 gt time: 20160630, nodes: 22, edges: 403 gt time: 20160930, nodes: 22, edges: 400 gt time: 20161231, nodes: 22, edges: 402 gt time: 20170331, nodes: 22, edges: 399 gt time: 20170630, nodes: 22, edges: 400 gt time: 20170930, nodes: 22, edges: 401 gt time: 20171231, nodes: 22, edges: 402 gt time: 20180331, nodes: 22, edges: 401 gt time: 20180630, nodes: 22, edges: 403 gt time: 20180930, nodes: 22, edges: 405 gt time: 20181231, nodes: 22, edges: 402 gt time: 20190331, nodes: 22, edges: 407 gt time: 20190630, nodes: 22, edges: 404 gt time: 20190930, nodes: 22, edges: 408 gt time: 20191231, nodes: 22, edges: 406 gt time: 20200331, 
nodes: 22, edges: 409 gt time: 20200630, nodes: 22, edges: 408 gt time: 20200930, nodes: 22, edges: 406 gt time: 20201231, nodes: 22, edges: 408 gt time: 20210331, nodes: 22, edges: 405 gt time: 20210630, nodes: 22, edges: 407 gt time: 20210930, nodes: 22, edges: 407
# Draw the first snapshot and check its connectivity.
G_temp = G_nxs[time_list[0]]
nx.draw(G_temp, with_labels=True,
        node_color='#8b0000', node_size=700,
        font_size=6, font_color='white', font_family='serif', font_weight='light',
        arrowsize=5, edge_color='black', width=1)
# Not strongly connected (India only borrows), but weakly connected.
print(nx.is_strongly_connected(G_temp))
print(nx.is_weakly_connected(G_temp))
False True
# How many strongly connected components does the first snapshot have?
nx.number_strongly_connected_components(G_temp)
2
# Print the members of each strongly connected component.
for component in nx.strongly_connected_components(G_temp):
    print(component)
{'Austria', 'United_Kingdom', 'Switzerland', 'South_Korea', 'Germany', 'Sweden', 'Chile', 'Spain', 'Portugal', 'Japan', 'Canada', 'Finland', 'Turkey', 'Australia', 'United_States', 'Ireland', 'Italy', 'France', 'Belgium', 'Greece', 'Netherlands'}
{'India'}
# Per-snapshot summary statistics for every networkx graph.
# Weighted degrees ("strengths") are computed once per direction and reused
# for both the averages and the maxima (previously recomputed twice each).
G_nxs_statistics = {}
for name, G_nx in G_nxs.items():
    print("*** process ", name)
    statistics = {}
    # Number of nodes
    n_nodes = nx.number_of_nodes(G_nx)
    statistics['nodes'] = n_nodes
    # Number of links
    statistics['links'] = nx.number_of_edges(G_nx)
    # Density
    statistics['density'] = nx.density(G_nx)
    # Average degrees (in-degree, out-degree, total degree)
    statistics['avg_in_degree'] = sum(d for _, d in G_nx.in_degree()) / n_nodes
    statistics['avg_out_degree'] = sum(d for _, d in G_nx.out_degree()) / n_nodes
    statistics['avg_total_degree'] = sum(d for _, d in G_nx.degree()) / n_nodes
    # Strengths = degrees weighted by claims_held (billions of USD).
    in_strengths = [s for _, s in G_nx.in_degree(weight='claims_held')]
    out_strengths = [s for _, s in G_nx.out_degree(weight='claims_held')]
    total_strengths = [s for _, s in G_nx.degree(weight='claims_held')]
    # Average strength (in-strength, out-strength, total strength)
    statistics['avg_in_strength'] = sum(in_strengths) / n_nodes
    statistics['avg_out_strength'] = sum(out_strengths) / n_nodes
    statistics['avg_total_strength'] = sum(total_strengths) / n_nodes
    # Maximum strength (in-strength, out-strength, total strength)
    statistics['max_in_strength'] = max(in_strengths)
    statistics['max_out_strength'] = max(out_strengths)
    statistics['max_total_strength'] = max(total_strengths)
    # Average clustering coefficient (unweighted and weighted)
    statistics['avg_clus_coef'] = nx.average_clustering(G_nx)
    statistics['avg_clus_coef_weight'] = nx.average_clustering(G_nx, weight="claims_held")
    # Path lengths / diameter are only defined on the largest strongly
    # connected component (the full digraph is not strongly connected).
    components = nx.strongly_connected_components(G_nx)
    G_strong = nx.subgraph(G_nx, max(components, key=len))
    statistics['avg_path_length_for_strong'] = nx.average_shortest_path_length(G_strong)
    statistics['avg_path_length_weight_for_strong'] = nx.average_shortest_path_length(G_strong, weight='claims_held')
    # Diameter (hop count, and weighted via Dijkstra eccentricities)
    statistics['diameter_for_strong'] = nx.diameter(G_strong)
    statistics['diameter_weight_for_strong'] = nx_diameter_with_weight(G_strong, weight_str='claims_held')
    G_nxs_statistics[name] = statistics
print("finish")
*** process 20150331 *** process 20150630 *** process 20150930 *** process 20151231 *** process 20160331 *** process 20160630 *** process 20160930 *** process 20161231 *** process 20170331 *** process 20170630 *** process 20170930 *** process 20171231 *** process 20180331 *** process 20180630 *** process 20180930 *** process 20181231 *** process 20190331 *** process 20190630 *** process 20190930 *** process 20191231 *** process 20200331 *** process 20200630 *** process 20200930 *** process 20201231 *** process 20210331 *** process 20210630 *** process 20210930 finish
# Full statistics table: one column per snapshot, one row per metric.
pd.DataFrame.from_dict(G_nxs_statistics, orient='index').T
| 20150331 | 20150630 | 20150930 | 20151231 | 20160331 | 20160630 | 20160930 | 20161231 | 20170331 | 20170630 | ... | 20190630 | 20190930 | 20191231 | 20200331 | 20200630 | 20200930 | 20201231 | 20210331 | 20210630 | 20210930 | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| nodes | 22.000000 | 22.000000 | 22.000000 | 22.000000 | 22.000000 | 22.000000 | 22.000000 | 22.000000 | 22.000000 | 22.000000 | ... | 22.000000 | 22.000000 | 22.000000 | 22.000000 | 22.000000 | 22.000000 | 22.000000 | 22.000000 | 22.000000 | 22.000000 |
| links | 406.000000 | 404.000000 | 402.000000 | 400.000000 | 403.000000 | 403.000000 | 400.000000 | 402.000000 | 399.000000 | 400.000000 | ... | 404.000000 | 408.000000 | 406.000000 | 409.000000 | 408.000000 | 406.000000 | 408.000000 | 405.000000 | 407.000000 | 407.000000 |
| density | 0.878788 | 0.874459 | 0.870130 | 0.865801 | 0.872294 | 0.872294 | 0.865801 | 0.870130 | 0.863636 | 0.865801 | ... | 0.874459 | 0.883117 | 0.878788 | 0.885281 | 0.883117 | 0.878788 | 0.883117 | 0.876623 | 0.880952 | 0.880952 |
| avg_in_degree | 18.454545 | 18.363636 | 18.272727 | 18.181818 | 18.318182 | 18.318182 | 18.181818 | 18.272727 | 18.136364 | 18.181818 | ... | 18.363636 | 18.545455 | 18.454545 | 18.590909 | 18.545455 | 18.454545 | 18.545455 | 18.409091 | 18.500000 | 18.500000 |
| avg_out_degree | 18.454545 | 18.363636 | 18.272727 | 18.181818 | 18.318182 | 18.318182 | 18.181818 | 18.272727 | 18.136364 | 18.181818 | ... | 18.363636 | 18.545455 | 18.454545 | 18.590909 | 18.545455 | 18.454545 | 18.545455 | 18.409091 | 18.500000 | 18.500000 |
| avg_total_degree | 36.909091 | 36.727273 | 36.545455 | 36.363636 | 36.636364 | 36.636364 | 36.363636 | 36.545455 | 36.272727 | 36.363636 | ... | 36.727273 | 37.090909 | 36.909091 | 37.181818 | 37.090909 | 36.909091 | 37.090909 | 36.818182 | 37.000000 | 37.000000 |
| avg_in_strength | 719.042366 | 716.379387 | 711.036157 | 681.404122 | 714.297744 | 715.838051 | 707.437403 | 680.491755 | 714.024134 | 733.318375 | ... | 787.925090 | 786.284425 | 777.337604 | 847.722596 | 839.508807 | 856.746106 | 879.724690 | 909.058090 | 901.990000 | 903.215459 |
| avg_out_strength | 719.042366 | 716.379387 | 711.036157 | 681.404122 | 714.297744 | 715.838051 | 707.437403 | 680.491755 | 714.024134 | 733.318375 | ... | 787.925090 | 786.284425 | 777.337604 | 847.722596 | 839.508807 | 856.746106 | 879.724689 | 909.058090 | 901.990000 | 903.215459 |
| avg_total_strength | 1438.084732 | 1432.758773 | 1422.072315 | 1362.808243 | 1428.595488 | 1431.676103 | 1414.874806 | 1360.983510 | 1428.048267 | 1466.636749 | ... | 1575.850179 | 1572.568851 | 1554.675208 | 1695.445192 | 1679.017615 | 1713.492211 | 1759.449379 | 1818.116180 | 1803.980001 | 1806.430918 |
| max_in_strength | 2381.115900 | 2326.251300 | 2390.292600 | 2467.546100 | 2601.884100 | 2620.705300 | 2683.908000 | 2648.925800 | 2681.878700 | 2647.471400 | ... | 2896.305200 | 2945.025900 | 2992.358400 | 3229.681000 | 3112.945500 | 3158.931800 | 3285.426100 | 3437.434000 | 3277.325700 | 3307.011700 |
| max_out_strength | 5460.318715 | 5420.730515 | 5316.256447 | 5164.569164 | 5398.290632 | 5509.303256 | 5381.162725 | 5346.196242 | 5577.909380 | 5626.202025 | ... | 6177.127795 | 6272.336411 | 6290.562397 | 7169.467488 | 6719.901182 | 6733.283824 | 6803.039060 | 7217.514632 | 7029.859086 | 7294.908210 |
| max_total_strength | 7426.806715 | 7377.435515 | 7217.589447 | 6936.898164 | 7266.095632 | 7415.743256 | 7282.842725 | 7236.680242 | 7521.092380 | 7601.813025 | ... | 8315.813795 | 8396.423411 | 8260.562397 | 9463.306488 | 9020.535182 | 9029.639824 | 9131.186060 | 9818.741632 | 9664.494086 | 9908.429210 |
| avg_clus_coef | 0.918406 | 0.917345 | 0.912777 | 0.910329 | 0.915476 | 0.921938 | 0.915274 | 0.916467 | 0.913212 | 0.911958 | ... | 0.919148 | 0.920662 | 0.919822 | 0.922576 | 0.920903 | 0.920680 | 0.920628 | 0.921047 | 0.921228 | 0.922612 |
| avg_clus_coef_weight | 0.011140 | 0.011400 | 0.010927 | 0.010065 | 0.009951 | 0.009839 | 0.009490 | 0.008984 | 0.009305 | 0.009843 | ... | 0.009956 | 0.009522 | 0.009292 | 0.008604 | 0.009292 | 0.009548 | 0.009491 | 0.009025 | 0.009543 | 0.009153 |
| avg_path_length_for_strong | 1.073810 | 1.076190 | 1.080952 | 1.085714 | 1.078571 | 1.073810 | 1.080952 | 1.078571 | 1.083333 | 1.083333 | ... | 1.076190 | 1.069048 | 1.071429 | 1.066667 | 1.069048 | 1.071429 | 1.069048 | 1.071429 | 1.069048 | 1.069048 |
| avg_path_length_weight_for_strong | 1.460431 | 1.348494 | 1.326041 | 1.277487 | 1.257172 | 1.173444 | 1.268087 | 1.149238 | 1.264553 | 1.354689 | ... | 1.310787 | 1.329745 | 1.384722 | 1.525382 | 1.367426 | 1.276627 | 1.246829 | 1.307801 | 1.337465 | 1.360774 |
| diameter_for_strong | 2.000000 | 2.000000 | 2.000000 | 2.000000 | 2.000000 | 2.000000 | 2.000000 | 2.000000 | 2.000000 | 2.000000 | ... | 2.000000 | 2.000000 | 2.000000 | 2.000000 | 2.000000 | 2.000000 | 2.000000 | 2.000000 | 2.000000 | 2.000000 |
| diameter_weight_for_strong | 16.712108 | 15.222612 | 15.507221 | 12.604031 | 11.869555 | 12.290150 | 11.677985 | 10.409927 | 11.661216 | 13.087702 | ... | 11.249915 | 10.524969 | 11.125905 | 12.981584 | 12.800612 | 13.101806 | 12.418850 | 12.693274 | 12.703218 | 14.041476 |
18 rows × 27 columns
# Distribution of each metric across the 27 snapshots.
pd.DataFrame.from_dict(G_nxs_statistics, orient='index').describe().T
| count | mean | std | min | 25% | 50% | 75% | max | |
|---|---|---|---|---|---|---|---|---|
| nodes | 27.0 | 22.000000 | 0.000000 | 22.000000 | 22.000000 | 22.000000 | 22.000000 | 22.000000 |
| links | 27.0 | 404.000000 | 2.935198 | 399.000000 | 402.000000 | 404.000000 | 406.500000 | 409.000000 |
| density | 27.0 | 0.874459 | 0.006353 | 0.863636 | 0.870130 | 0.874459 | 0.879870 | 0.885281 |
| avg_in_degree | 27.0 | 18.363636 | 0.133418 | 18.136364 | 18.272727 | 18.363636 | 18.477273 | 18.590909 |
| avg_out_degree | 27.0 | 18.363636 | 0.133418 | 18.136364 | 18.272727 | 18.363636 | 18.477273 | 18.590909 |
| avg_total_degree | 27.0 | 36.727273 | 0.266836 | 36.272727 | 36.545455 | 36.727273 | 36.954545 | 37.181818 |
| avg_in_strength | 27.0 | 775.640146 | 69.507916 | 680.491755 | 716.108719 | 766.387224 | 814.280785 | 909.058090 |
| avg_out_strength | 27.0 | 775.640146 | 69.507916 | 680.491755 | 716.108719 | 766.387224 | 814.280785 | 909.058090 |
| avg_total_strength | 27.0 | 1551.280292 | 139.015833 | 1360.983510 | 1432.217438 | 1532.774448 | 1628.561570 | 1818.116180 |
| max_in_strength | 27.0 | 2837.377944 | 308.274066 | 2326.251300 | 2648.198600 | 2736.680000 | 3068.365750 | 3437.434000 |
| max_out_strength | 27.0 | 6045.563033 | 654.391205 | 5164.569164 | 5484.810986 | 5878.186743 | 6505.231790 | 7294.908210 |
| max_total_strength | 27.0 | 8161.270737 | 874.440626 | 6936.898164 | 7421.274986 | 7994.035743 | 8708.479297 | 9908.429210 |
| avg_clus_coef | 27.0 | 0.917879 | 0.003762 | 0.910329 | 0.915375 | 0.918491 | 0.920792 | 0.922678 |
| avg_clus_coef_weight | 27.0 | 0.009799 | 0.000672 | 0.008604 | 0.009299 | 0.009724 | 0.010014 | 0.011400 |
| avg_path_length_for_strong | 27.0 | 1.075397 | 0.005485 | 1.066667 | 1.070238 | 1.076190 | 1.078571 | 1.085714 |
| avg_path_length_weight_for_strong | 27.0 | 1.310632 | 0.086950 | 1.145918 | 1.268590 | 1.307801 | 1.351592 | 1.525382 |
| diameter_for_strong | 27.0 | 2.000000 | 0.000000 | 2.000000 | 2.000000 | 2.000000 | 2.000000 | 2.000000 |
| diameter_weight_for_strong | 27.0 | 12.194128 | 1.782026 | 9.102623 | 10.907103 | 12.290150 | 12.891098 | 16.712108 |
# Temporal evolution of selected strength statistics (one subplot each).
# The three previously copy-pasted subplot stanzas are folded into a loop.
pd_temportal = pd.DataFrame.from_dict(G_nxs_statistics, orient='index').T
plt.figure(figsize=(24, 6))
stats_to_plot = ['avg_total_strength', 'max_in_strength', 'max_out_strength']
for position, stat in enumerate(stats_to_plot, start=1):
    plt.subplot(1, 3, position)
    plt.title(stat)
    plt.xticks(rotation=80)
    plt.plot(pd_temportal.columns.values, pd_temportal.loc[stat])
plt.show()
# Work with the first snapshot again for the filtering experiments below.
G_temp = G_nxs[time_list[0]]
def absolute_filter(G, k):
    """Return a read-only view of G keeping only edges with claims_held >= k,
    printing a few summary statistics of the filtered graph along the way.
    """
    def keep_edge(s, t):
        # Edge survives the filter iff its loan amount reaches the threshold.
        return G.edges[s, t]['claims_held'] >= k

    G_filtered = nx.subgraph_view(G, filter_edge=keep_edge)
    print(f"Filter loan amount >= {k:.0}:")
    print(f"\t{len(G_filtered.edges)} of {len(G.edges)} edges remain.")
    print(f"\tDensity: {nx.density(G_filtered):.3}")
    print("\tPaths between any node pair:", nx.is_strongly_connected(G_filtered))
    print("\tWeakly connected:", nx.is_weakly_connected(G_filtered))
    return G_filtered
# Apply increasingly strict absolute thresholds (in billions of USD).
G_1e0 = absolute_filter(G_temp, 1e0)
G_1e1 = absolute_filter(G_temp, 1e1)
G_1e2 = absolute_filter(G_temp, 1e2)
G_1e3 = absolute_filter(G_temp, 1e3)
Filter loan amount >= 1e+00: 270 of 406 edges remain. Density: 0.584 Paths between any node pair: False Weakly connected: True Filter loan amount >= 1e+01: 160 of 406 edges remain. Density: 0.346 Paths between any node pair: False Weakly connected: True Filter loan amount >= 1e+02: 40 of 406 edges remain. Density: 0.0866 Paths between any node pair: False Weakly connected: False Filter loan amount >= 1e+03: 1 of 406 edges remain. Density: 0.00216 Paths between any node pair: False Weakly connected: False
# Identify disconnected countries.
isolates = set(nx.isolates(G_temp))
# Calculate total lending and borrowing (node strength), largest first.
# (sorted() accepts the degree view directly; the extra list() was redundant.)
country_size = sorted(
    G_temp.degree(weight='claims_held'),
    key=lambda pair: pair[1],
    reverse=True)
plt.figure(figsize=(12, 4))
plt.title('Total Lending and Borrowing')
xticks = np.arange(len(country_size))
# First bar series: connected countries; second: isolates (zeroed elsewhere).
plt.bar(xticks, [0 if name in isolates else size for name, size in country_size])
plt.bar(xticks, [size if name in isolates else 0 for name, size in country_size])
plt.xticks(xticks, [name for name, size in country_size], rotation=45)
plt.ylabel('Node Strength')
plt.show()
def plot_hist(degrees):
    """Histogram of the numeric part of (node, value) degree pairs."""
    values = [value for _, value in degrees]
    plt.hist(values)
# In-, out-, and total-strength distributions side by side.
plt.figure(figsize=(15, 3))
panels = [('In-strength distribution', G_temp.in_degree(weight='claims_held')),
          ('Out-strength distribution', G_temp.out_degree(weight='claims_held')),
          ('Total-strength distribution', G_temp.degree(weight='claims_held'))]
for position, (title, degrees) in enumerate(panels, start=1):
    plt.subplot(1, 3, position)
    plt.title(title)
    plot_hist(degrees)
plt.show()
def top_n(deg_dist, n=5):
    """Return the identifiers of the n largest (identifier, value) pairs.

    Pairs are ranked by descending value; ties keep their original order
    (stable sort). Renamed the loop variable: the original shadowed the
    builtin id().
    """
    ranked = sorted(deg_dist, key=lambda pair: pair[1], reverse=True)
    return [identifier for identifier, _ in ranked[:n]]
# Top-10 countries by each strength measure, one row per measure.
pd.DataFrame({
'In-strength': top_n(G_temp.in_degree(weight='claims_held'),10),
'Out-strength': top_n(G_temp.out_degree(weight='claims_held'),10),
'Total-strength': top_n(G_temp.degree(weight='claims_held'),10)
}).T
| 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | |
|---|---|---|---|---|---|---|---|---|---|---|
| In-strength | Japan | United_Kingdom | France | United_States | Germany | Switzerland | Canada | Spain | Netherlands | Italy |
| Out-strength | United_States | United_Kingdom | Germany | France | Japan | Italy | Netherlands | Spain | Australia | Canada |
| Total-strength | United_States | United_Kingdom | France | Japan | Germany | Switzerland | Canada | Spain | Netherlands | Italy |
# Distance-based centralities use reciprocal weights (big loans = short edges).
score_closeness = nx.closeness_centrality(G_temp, distance='claims_held_reciprocal')
score_betweenness = nx.betweenness_centrality(G_temp, weight='claims_held_reciprocal')
centrality_table = pd.DataFrame({
    'Closeness': list(score_closeness.values()),
    'Betweenness': list(score_betweenness.values()),
})
centrality_table.describe().T
| count | mean | std | min | 25% | 50% | 75% | max | |
|---|---|---|---|---|---|---|---|---|
| Closeness | 22.0 | 27.279663 | 16.167909 | 0.0 | 10.985898 | 34.525204 | 41.838614 | 45.645066 |
| Betweenness | 22.0 | 0.071320 | 0.140220 | 0.0 | 0.000000 | 0.000000 | 0.069643 | 0.569048 |
# Histograms of both centrality score distributions.
plt.figure(figsize=(10, 3))
plt.suptitle('Centrality Distributions')
panels = [('Closeness score', score_closeness),
          ('Betweenness score', score_betweenness)]
for position, (label, scores) in enumerate(panels, start=1):
    plt.subplot(1, 2, position)
    plt.hist(list(scores.values()))
    plt.xlabel(label)
    plt.ylabel('Node frequency')
plt.show()
def count_zeros(name, dist):
    """Print how many entries of *dist* are non-positive, with their share.

    Args:
        name: label used in the printed message (e.g. 'closeness').
        dist: iterable of numeric scores (e.g. a dict's ``.values()``).

    Returns:
        The number of non-positive entries (also printed), so callers can
        reuse the count. Existing call sites ignore the return value.
    """
    # Materialize once: the original iterated *dist* twice (sum + len),
    # which breaks on one-shot iterators.
    scores = list(dist)
    n_zero = sum(1 for s in scores if s <= 0)
    # Guard empty input — the original raised ZeroDivisionError.
    share = n_zero / len(scores) if scores else 0.0
    print(f"# nodes with zero {name}: {n_zero} ({share:.1%})")
    return n_zero
# Report how many nodes received a degenerate (zero) centrality score.
count_zeros('closeness', score_closeness.values())
count_zeros('betweenness', score_betweenness.values())
# nodes with zero closeness: 1 (4.5%) # nodes with zero betweenness: 14 (63.6%)
# Top-10 countries under each ranking (strengths + centralities),
# one row per measure for easy comparison.
pd.DataFrame({
    'In-strength': top_n(G_temp.in_degree(weight='claims_held'),10),
    'Out-strength': top_n(G_temp.out_degree(weight='claims_held'),10),
    'Total-strength': top_n(G_temp.degree(weight='claims_held'),10),
    'Closeness': top_n(score_closeness.items(),10),
    'Betweenness': top_n(score_betweenness.items(),10)
}).T
| 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | |
|---|---|---|---|---|---|---|---|---|---|---|
| In-strength | Japan | United_Kingdom | France | United_States | Germany | Switzerland | Canada | Spain | Netherlands | Italy |
| Out-strength | United_States | United_Kingdom | Germany | France | Japan | Italy | Netherlands | Spain | Australia | Canada |
| Total-strength | United_States | United_Kingdom | France | Japan | Germany | Switzerland | Canada | Spain | Netherlands | Italy |
| Closeness | France | Germany | United_States | Japan | United_Kingdom | Canada | Switzerland | Spain | Sweden | Netherlands |
| Betweenness | United_States | United_Kingdom | France | Germany | Spain | Sweden | Netherlands | Italy | Australia | Austria |
# Switch to the graph-tool copy of the first snapshot and draw it.
G_temp = G_gts[time_list[0]]
# NOTE: list_properties() prints to stdout and returns None (hence the
# trailing "None" in the cell output).
print(G_temp.list_properties())
gt.graph_draw(G_temp,
              vertex_text = G_temp.properties[('v', '_graphml_vertex_id')],
              vertex_size = 10)
_graphml_vertex_id (vertex) (type: string) _graphml_edge_id (edge) (type: string) claims_held (edge) (type: long double) claims_held_reciprocal (edge) (type: long double) None
<VertexPropertyMap object with value type 'vector<double>', for Graph 0x7fc8196c86d0, at 0x7fc80a1c4f10>
# Fit a nested stochastic block model, treating the 'claims_held' edge
# weights as a real-exponential covariate.
state = gt.minimize_nested_blockmodel_dl(G_temp,
    state_args=dict(recs=[G_temp.properties[('e', 'claims_held')]], rec_types=["real-exponential"]))
# Description length of the fit (lower = better model).
print(state.entropy())
state.draw(vertex_text = G_temp.properties[('v', '_graphml_vertex_id')],
           vertex_size = 10)
1956.166187611734
(<VertexPropertyMap object with value type 'vector<double>', for Graph 0x7fc8196c86d0, at 0x7fc818df3be0>, <GraphView object, directed, with 26 vertices and 25 edges, edges filtered by (<EdgePropertyMap object with value type 'bool', for Graph 0x7fc818dfad00, at 0x7fc80a0cec10>, False), vertices filtered by (<VertexPropertyMap object with value type 'bool', for Graph 0x7fc818dfad00, at 0x7fc80a0ce7f0>, False), at 0x7fc818dfad00>, <VertexPropertyMap object with value type 'vector<double>', for Graph 0x7fc818dfad00, at 0x7fc80a0cedc0>)
# The SBM optimization is stochastic: repeat it 10 times and keep all
# states so the best (lowest-entropy) run can be selected afterwards.
entropys = []
states = []
for i in range(10):
    state = gt.minimize_nested_blockmodel_dl(G_temp,
        state_args=dict(recs=[G_temp.properties[('e', 'claims_held')]], rec_types=["real-exponential"]))
    entropys.append(state.entropy())
    states.append(state)
pd.DataFrame({'entropys': entropys}).describe()
| entropys | |
|---|---|
| count | 10.000000 |
| mean | 1955.550070 |
| std | 9.986092 |
| min | 1941.017357 |
| 25% | 1949.682928 |
| 50% | 1956.166188 |
| 75% | 1962.483771 |
| max | 1968.344881 |
# Pick the run with the smallest entropy (best description length) and draw it.
index = np.argmin(entropys)
print(index)
print(entropys[index])
states[index].draw(vertex_text = G_temp.properties[('v', '_graphml_vertex_id')],
                   vertex_size = 10)
4 1941.017357482232
(<VertexPropertyMap object with value type 'vector<double>', for Graph 0x7fc8196c86d0, at 0x7fc80983b8e0>, <GraphView object, directed, with 29 vertices and 28 edges, edges filtered by (<EdgePropertyMap object with value type 'bool', for Graph 0x7fc80a1e0220, at 0x7fc809840c70>, False), vertices filtered by (<VertexPropertyMap object with value type 'bool', for Graph 0x7fc80a1e0220, at 0x7fc809840b50>, False), at 0x7fc80a1e0220>, <VertexPropertyMap object with value type 'vector<double>', for Graph 0x7fc80a1e0220, at 0x7fc809840760>)
# Walk the nested hierarchy from the bottom level up, stopping once the
# level collapses to a single block.
levels = states[index].get_levels()
for s in levels:
    print(s)
    if s.get_N() == 1:
        break
<BlockState object with 22 blocks (4 nonempty), degree-corrected, with 1 edge covariate, for graph <Graph object, directed, with 22 vertices and 406 edges, 1 internal vertex property, 3 internal edge properties, at 0x7fc8196c86d0>, at 0x7fc8098faeb0> <BlockState object with 4 blocks (2 nonempty), with 1 edge covariate, for graph <Graph object, directed, with 22 vertices and 16 edges, 2 internal vertex properties, 1 internal edge property, at 0x7fc809917340>, at 0x7fc80991ca90> <BlockState object with 2 blocks (1 nonempty), with 1 edge covariate, for graph <Graph object, directed, with 4 vertices and 4 edges, 2 internal vertex properties, 1 internal edge property, at 0x7fc80991f670>, at 0x7fc80991c940> <BlockState object with 1 blocks (1 nonempty), with 1 edge covariate, for graph <Graph object, directed, with 2 vertices and 1 edge, 2 internal vertex properties, 1 internal edge property, at 0x7fc8099235e0>, at 0x7fc80991fcd0>
# Enrich each quarterly snapshot with derived features (normalized edge
# weights, per-node risk scores) and cache the results as GraphML, then
# reload them for both networkx and graph-tool.
feature_graphml_dir = 'FNA Feature graphml'
if not os.path.exists(feature_graphml_dir):
    print('*** generate feature graphml')
    os.makedirs(feature_graphml_dir)
    # NOTE(review): generation assumed to be guarded by the directory
    # check above — confirm the loop was indented under this `if` in the
    # original notebook (indentation was lost in this export).
    for one_time in time_list:
        G = nx.read_graphml(graphml_dir + "/" + one_time + ".graphml")
        in_strength = G.in_degree(weight = 'claims_held')
        out_strength = G.out_degree(weight = 'claims_held')
        # add norm: each edge weight as a share of the lender's total lending,
        # plus its reciprocal for distance-based algorithms
        for src, tar, wgt in G.edges.data('claims_held'):
            G.edges[src, tar]['claims_held_norm'] = wgt / out_strength[src]
            G.edges[src, tar]['claims_held_norm_reciprocal'] = 1 / G.edges[src, tar]['claims_held_norm']
        # add inherent risk: borrowing relative to lending.
        # NOTE(review): divides by out-strength — a node with zero out-strength
        # would raise ZeroDivisionError; apparently none occur in this data.
        for node in list(G.nodes(data=False)):
            G.nodes[node]['inherent_risk'] = in_strength[node] / out_strength[node]
        # add propagated risk: PageRank seeded with inherent risk over the
        # normalized weights
        propagated_risk = nx.pagerank(G = G.reverse(), # The lender has higher risk, so change the direction
                                      nstart = dict(G.nodes.data('inherent_risk')),
                                      weight = 'claims_held_norm')
        for node in list(G.nodes(data=False)):
            G.nodes[node]['propagated_risk'] = propagated_risk[node]
        nx.write_graphml_lxml(G, feature_graphml_dir + "/" + one_time + ".graphml")
# Reload the enriched snapshots into networkx, echoing sizes and sample
# attributes as a sanity check.
print("*** read graphml for nx")
G_nxs_feature = {}
for one_time in time_list:
    G = nx.read_graphml(feature_graphml_dir + "/" + one_time + ".graphml")
    print("nx time: " + one_time
          + ", nodes: " + str(G.number_of_nodes())
          + ", edges: " + str(G.number_of_edges()))
    print(" " + one_time + ", node attrs: " + str(list(G.nodes.items())[0][1]))
    print(" " + one_time + ", edge attrs: " + str(list(G.edges.items())[0][1]))
    G_nxs_feature[one_time] = G
# Same reload for graph-tool.
print("*** read graphml for gt")
G_gts_feature = {}
for one_time in time_list:
    G = gt.load_graph(feature_graphml_dir + "/" + one_time + ".graphml")
    print("gt time: " + one_time
          + ", nodes: " + str(G.num_vertices())
          + ", edges: " + str(G.num_edges()))
    print(" " + one_time
          + ", node attrs: " + str(dict(G.vertex_properties).keys()))
    print(" " + one_time
          + ", edge attrs: " + str(dict(G.edge_properties).keys()))
    G_gts_feature[one_time] = G
*** generate feature graphml
*** read graphml for nx
nx time: 20150331, nodes: 22, edges: 406
20150331, node attrs: {'inherent_risk': 0.8359096223519187, 'propagated_risk': 0.036327899654037314}
20150331, edge attrs: {'claims_held': 0.522085, 'claims_held_reciprocal': 1.9153969181263586, 'claims_held_norm': 0.0013394901122495205, 'claims_held_norm_reciprocal': 746.5527299194574}
nx time: 20150630, nodes: 22, edges: 404
20150630, node attrs: {'inherent_risk': 0.8717347129552802, 'propagated_risk': 0.03620376543272468}
20150630, edge attrs: {'claims_held': 0.508499, 'claims_held_reciprocal': 1.9665722056483885, 'claims_held_norm': 0.0013148449139959802, 'claims_held_norm_reciprocal': 760.5459696085932}
nx time: 20150930, nodes: 22, edges: 402
20150930, node attrs: {'inherent_risk': 0.8581957826761578, 'propagated_risk': 0.03510313873428144}
20150930, edge attrs: {'claims_held': 0.485603, 'claims_held_reciprocal': 2.0592953503170284, 'claims_held_norm': 0.0013257733160539399, 'claims_held_norm_reciprocal': 754.2767589986057}
nx time: 20151231, nodes: 22, edges: 400
20151231, node attrs: {'inherent_risk': 0.9353228784830633, 'propagated_risk': 0.035493916269102165}
20151231, edge attrs: {'claims_held': 0.464434, 'claims_held_reciprocal': 2.1531584681569393, 'claims_held_norm': 0.0012678699970503434, 'claims_held_norm_reciprocal': 788.7243978692344}
nx time: 20160331, nodes: 22, edges: 403
20160331, node attrs: {'inherent_risk': 0.7244609180014627, 'propagated_risk': 0.03607330938040121}
20160331, edge attrs: {'claims_held': 0.517579, 'claims_held_reciprocal': 1.9320722054024602, 'claims_held_norm': 0.0013396333198742519, 'claims_held_norm_reciprocal': 746.4729229740774}
nx time: 20160630, nodes: 22, edges: 403
20160630, node attrs: {'inherent_risk': 0.7355449217630071, 'propagated_risk': 0.03669741517811712}
20160630, edge attrs: {'claims_held': 0.634793, 'claims_held_reciprocal': 1.575316678035202, 'claims_held_norm': 0.0016616117391745262, 'claims_held_norm_reciprocal': 601.8253099829392}
nx time: 20160930, nodes: 22, edges: 400
20160930, node attrs: {'inherent_risk': 0.6805064251496327, 'propagated_risk': 0.03698837379631897}
20160930, edge attrs: {'claims_held': 0.655929, 'claims_held_reciprocal': 1.5245552491199506, 'claims_held_norm': 0.0016744784136535128, 'claims_held_norm_reciprocal': 597.2008906451764}
nx time: 20161231, nodes: 22, edges: 402
20161231, node attrs: {'inherent_risk': 0.7759774696685273, 'propagated_risk': 0.035818653032092224}
20161231, edge attrs: {'claims_held': 0.594394, 'claims_held_reciprocal': 1.6823857575951306, 'claims_held_norm': 0.0015533879071053572, 'claims_held_norm_reciprocal': 643.7542068055869}
nx time: 20170331, nodes: 22, edges: 399
20170331, node attrs: {'inherent_risk': 0.7024555590257988, 'propagated_risk': 0.03598888638307345}
20170331, edge attrs: {'claims_held': 0.556214, 'claims_held_reciprocal': 1.797869165465091, 'claims_held_norm': 0.0013616597055586108, 'claims_held_norm_reciprocal': 734.3978792335324}
nx time: 20170630, nodes: 22, edges: 400
20170630, node attrs: {'inherent_risk': 0.7002846530256357, 'propagated_risk': 0.0356084115961773}
20170630, edge attrs: {'claims_held': 0.692545, 'claims_held_reciprocal': 1.4439494906468173, 'claims_held_norm': 0.0016669050847957047, 'claims_held_norm_reciprocal': 599.9141817499222}
nx time: 20170930, nodes: 22, edges: 401
20170930, node attrs: {'inherent_risk': 0.6635532503651508, 'propagated_risk': 0.03624206599100516}
20170930, edge attrs: {'claims_held': 0.677814, 'claims_held_reciprocal': 1.4753309905077203, 'claims_held_norm': 0.0015950339847351626, 'claims_held_norm_reciprocal': 626.945889285261}
nx time: 20171231, nodes: 22, edges: 402
20171231, node attrs: {'inherent_risk': 0.7015242195612428, 'propagated_risk': 0.0353621806241856}
20171231, edge attrs: {'claims_held': 0.706178, 'claims_held_reciprocal': 1.4160735678539973, 'claims_held_norm': 0.001648147446379585, 'claims_held_norm_reciprocal': 606.7418313796239}
nx time: 20180331, nodes: 22, edges: 401
20180331, node attrs: {'inherent_risk': 0.6750053496873261, 'propagated_risk': 0.03617604209267839}
20180331, edge attrs: {'claims_held': 0.765177, 'claims_held_reciprocal': 1.3068871646690896, 'claims_held_norm': 0.0017649920013611048, 'claims_held_norm_reciprocal': 566.5748055678622}
nx time: 20180630, nodes: 22, edges: 403
20180630, node attrs: {'inherent_risk': 0.7354940253794288, 'propagated_risk': 0.03498572788684408}
20180630, edge attrs: {'claims_held': 0.721131, 'claims_held_reciprocal': 1.3867105976584007, 'claims_held_norm': 0.0017300779721331088, 'claims_held_norm_reciprocal': 578.0086308867598}
nx time: 20180930, nodes: 22, edges: 405
20180930, node attrs: {'inherent_risk': 0.663391386304741, 'propagated_risk': 0.03517247461976675}
20180930, edge attrs: {'claims_held': 0.724015, 'claims_held_reciprocal': 1.381186853863525, 'claims_held_norm': 0.0016866646515013713, 'claims_held_norm_reciprocal': 592.8860838518539}
nx time: 20181231, nodes: 22, edges: 402
20181231, node attrs: {'inherent_risk': 0.7089821155100179, 'propagated_risk': 0.03622782306546413}
20181231, edge attrs: {'claims_held': 0.651331, 'claims_held_reciprocal': 1.5353176802578106, 'claims_held_norm': 0.0015306258928652018, 'claims_held_norm_reciprocal': 653.327507826282}
nx time: 20190331, nodes: 22, edges: 407
20190331, node attrs: {'inherent_risk': 0.6119777507755584, 'propagated_risk': 0.03594987272278945}
20190331, edge attrs: {'claims_held': 0.669325, 'claims_held_reciprocal': 1.4940425055092819, 'claims_held_norm': 0.0015312227675064877, 'claims_held_norm_reciprocal': 653.0728390542711}
nx time: 20190630, nodes: 22, edges: 404
20190630, node attrs: {'inherent_risk': 0.6008645652869773, 'propagated_risk': 0.03642819572685735}
20190630, edge attrs: {'claims_held': 0.727638, 'claims_held_reciprocal': 1.3743097529265926, 'claims_held_norm': 0.0016416134144234473, 'claims_held_norm_reciprocal': 609.1568156143577}
nx time: 20190930, nodes: 22, edges: 408
20190930, node attrs: {'inherent_risk': 0.6267124978706184, 'propagated_risk': 0.035810238460497155}
20190930, edge attrs: {'claims_held': 0.705982, 'claims_held_reciprocal': 1.4164667087829435, 'claims_held_norm': 0.0016122530261698952, 'claims_held_norm_reciprocal': 620.2500375363678}
nx time: 20191231, nodes: 22, edges: 406
20191231, node attrs: {'inherent_risk': 0.6838906957539285, 'propagated_risk': 0.03598218968189507}
20191231, edge attrs: {'claims_held': 0.680489, 'claims_held_reciprocal': 1.469531469281649, 'claims_held_norm': 0.0015027868124710415, 'claims_held_norm_reciprocal': 665.4303802118772}
nx time: 20200331, nodes: 22, edges: 409
20200331, node attrs: {'inherent_risk': 0.6954238388004949, 'propagated_risk': 0.03487145069027153}
20200331, edge attrs: {'claims_held': 0.690694, 'claims_held_reciprocal': 1.4478191500143334, 'claims_held_norm': 0.0015241460987777985, 'claims_held_norm_reciprocal': 656.105081266089}
nx time: 20200630, nodes: 22, edges: 408
20200630, node attrs: {'inherent_risk': 0.6204131147998669, 'propagated_risk': 0.033923847675701294}
20200630, edge attrs: {'claims_held': 0.867392, 'claims_held_reciprocal': 1.1528812808972182, 'claims_held_norm': 0.001768669482294555, 'claims_held_norm_reciprocal': 565.3967629399394}
nx time: 20200930, nodes: 22, edges: 406
20200930, node attrs: {'inherent_risk': 0.6001195333484551, 'propagated_risk': 0.03399973359936588}
20200930, edge attrs: {'claims_held': 0.746455, 'claims_held_reciprocal': 1.3396654855282637, 'claims_held_norm': 0.001521718449345018, 'claims_held_norm_reciprocal': 657.1517881185068}
nx time: 20201231, nodes: 22, edges: 408
20201231, node attrs: {'inherent_risk': 0.6431382953593519, 'propagated_risk': 0.034350483406970306}
20201231, edge attrs: {'claims_held': 0.706643, 'claims_held_reciprocal': 1.4151417335203207, 'claims_held_norm': 0.0013248451933569362, 'claims_held_norm_reciprocal': 754.8051689466957}
nx time: 20210331, nodes: 22, edges: 405
20210331, node attrs: {'inherent_risk': 0.5439169653898268, 'propagated_risk': 0.03239908325466501}
20210331, edge attrs: {'claims_held': 0.556745, 'claims_held_reciprocal': 1.79615443335818, 'claims_held_norm': 0.0010313662800666675, 'claims_held_norm_reciprocal': 969.5876424574983}
nx time: 20210630, nodes: 22, edges: 407
20210630, node attrs: {'inherent_risk': 0.5668552464780188, 'propagated_risk': 0.03427402842815736}
20210630, edge attrs: {'claims_held': 0.536295, 'claims_held_reciprocal': 1.8646453910627547, 'claims_held_norm': 0.0009826939837594112, 'claims_held_norm_reciprocal': 1017.6107888382329}
nx time: 20210930, nodes: 22, edges: 407
20210930, node attrs: {'inherent_risk': 0.5392443909252684, 'propagated_risk': 0.03337062656640623}
20210930, edge attrs: {'claims_held': 0.468677, 'claims_held_reciprocal': 2.1336656161919616, 'claims_held_norm': 0.0008847816844231451, 'claims_held_norm_reciprocal': 1130.2223108878823}
*** read graphml for gt
gt time: 20150331, nodes: 22, edges: 406
20150331, node attrs: dict_keys(['_graphml_vertex_id', 'inherent_risk', 'propagated_risk'])
20150331, edge attrs: dict_keys(['_graphml_edge_id', 'claims_held', 'claims_held_norm', 'claims_held_norm_reciprocal', 'claims_held_reciprocal'])
gt time: 20150630, nodes: 22, edges: 404
20150630, node attrs: dict_keys(['_graphml_vertex_id', 'inherent_risk', 'propagated_risk'])
20150630, edge attrs: dict_keys(['_graphml_edge_id', 'claims_held', 'claims_held_norm', 'claims_held_norm_reciprocal', 'claims_held_reciprocal'])
gt time: 20150930, nodes: 22, edges: 402
20150930, node attrs: dict_keys(['_graphml_vertex_id', 'inherent_risk', 'propagated_risk'])
20150930, edge attrs: dict_keys(['_graphml_edge_id', 'claims_held', 'claims_held_norm', 'claims_held_norm_reciprocal', 'claims_held_reciprocal'])
gt time: 20151231, nodes: 22, edges: 400
20151231, node attrs: dict_keys(['_graphml_vertex_id', 'inherent_risk', 'propagated_risk'])
20151231, edge attrs: dict_keys(['_graphml_edge_id', 'claims_held', 'claims_held_norm', 'claims_held_norm_reciprocal', 'claims_held_reciprocal'])
gt time: 20160331, nodes: 22, edges: 403
20160331, node attrs: dict_keys(['_graphml_vertex_id', 'inherent_risk', 'propagated_risk'])
20160331, edge attrs: dict_keys(['_graphml_edge_id', 'claims_held', 'claims_held_norm', 'claims_held_norm_reciprocal', 'claims_held_reciprocal'])
gt time: 20160630, nodes: 22, edges: 403
20160630, node attrs: dict_keys(['_graphml_vertex_id', 'inherent_risk', 'propagated_risk'])
20160630, edge attrs: dict_keys(['_graphml_edge_id', 'claims_held', 'claims_held_norm', 'claims_held_norm_reciprocal', 'claims_held_reciprocal'])
gt time: 20160930, nodes: 22, edges: 400
20160930, node attrs: dict_keys(['_graphml_vertex_id', 'inherent_risk', 'propagated_risk'])
20160930, edge attrs: dict_keys(['_graphml_edge_id', 'claims_held', 'claims_held_norm', 'claims_held_norm_reciprocal', 'claims_held_reciprocal'])
gt time: 20161231, nodes: 22, edges: 402
20161231, node attrs: dict_keys(['_graphml_vertex_id', 'inherent_risk', 'propagated_risk'])
20161231, edge attrs: dict_keys(['_graphml_edge_id', 'claims_held', 'claims_held_norm', 'claims_held_norm_reciprocal', 'claims_held_reciprocal'])
gt time: 20170331, nodes: 22, edges: 399
20170331, node attrs: dict_keys(['_graphml_vertex_id', 'inherent_risk', 'propagated_risk'])
20170331, edge attrs: dict_keys(['_graphml_edge_id', 'claims_held', 'claims_held_norm', 'claims_held_norm_reciprocal', 'claims_held_reciprocal'])
gt time: 20170630, nodes: 22, edges: 400
20170630, node attrs: dict_keys(['_graphml_vertex_id', 'inherent_risk', 'propagated_risk'])
20170630, edge attrs: dict_keys(['_graphml_edge_id', 'claims_held', 'claims_held_norm', 'claims_held_norm_reciprocal', 'claims_held_reciprocal'])
gt time: 20170930, nodes: 22, edges: 401
20170930, node attrs: dict_keys(['_graphml_vertex_id', 'inherent_risk', 'propagated_risk'])
20170930, edge attrs: dict_keys(['_graphml_edge_id', 'claims_held', 'claims_held_norm', 'claims_held_norm_reciprocal', 'claims_held_reciprocal'])
gt time: 20171231, nodes: 22, edges: 402
20171231, node attrs: dict_keys(['_graphml_vertex_id', 'inherent_risk', 'propagated_risk'])
20171231, edge attrs: dict_keys(['_graphml_edge_id', 'claims_held', 'claims_held_norm', 'claims_held_norm_reciprocal', 'claims_held_reciprocal'])
gt time: 20180331, nodes: 22, edges: 401
20180331, node attrs: dict_keys(['_graphml_vertex_id', 'inherent_risk', 'propagated_risk'])
20180331, edge attrs: dict_keys(['_graphml_edge_id', 'claims_held', 'claims_held_norm', 'claims_held_norm_reciprocal', 'claims_held_reciprocal'])
gt time: 20180630, nodes: 22, edges: 403
20180630, node attrs: dict_keys(['_graphml_vertex_id', 'inherent_risk', 'propagated_risk'])
20180630, edge attrs: dict_keys(['_graphml_edge_id', 'claims_held', 'claims_held_norm', 'claims_held_norm_reciprocal', 'claims_held_reciprocal'])
gt time: 20180930, nodes: 22, edges: 405
20180930, node attrs: dict_keys(['_graphml_vertex_id', 'inherent_risk', 'propagated_risk'])
20180930, edge attrs: dict_keys(['_graphml_edge_id', 'claims_held', 'claims_held_norm', 'claims_held_norm_reciprocal', 'claims_held_reciprocal'])
gt time: 20181231, nodes: 22, edges: 402
20181231, node attrs: dict_keys(['_graphml_vertex_id', 'inherent_risk', 'propagated_risk'])
20181231, edge attrs: dict_keys(['_graphml_edge_id', 'claims_held', 'claims_held_norm', 'claims_held_norm_reciprocal', 'claims_held_reciprocal'])
gt time: 20190331, nodes: 22, edges: 407
20190331, node attrs: dict_keys(['_graphml_vertex_id', 'inherent_risk', 'propagated_risk'])
20190331, edge attrs: dict_keys(['_graphml_edge_id', 'claims_held', 'claims_held_norm', 'claims_held_norm_reciprocal', 'claims_held_reciprocal'])
gt time: 20190630, nodes: 22, edges: 404
20190630, node attrs: dict_keys(['_graphml_vertex_id', 'inherent_risk', 'propagated_risk'])
20190630, edge attrs: dict_keys(['_graphml_edge_id', 'claims_held', 'claims_held_norm', 'claims_held_norm_reciprocal', 'claims_held_reciprocal'])
gt time: 20190930, nodes: 22, edges: 408
20190930, node attrs: dict_keys(['_graphml_vertex_id', 'inherent_risk', 'propagated_risk'])
20190930, edge attrs: dict_keys(['_graphml_edge_id', 'claims_held', 'claims_held_norm', 'claims_held_norm_reciprocal', 'claims_held_reciprocal'])
gt time: 20191231, nodes: 22, edges: 406
20191231, node attrs: dict_keys(['_graphml_vertex_id', 'inherent_risk', 'propagated_risk'])
20191231, edge attrs: dict_keys(['_graphml_edge_id', 'claims_held', 'claims_held_norm', 'claims_held_norm_reciprocal', 'claims_held_reciprocal'])
gt time: 20200331, nodes: 22, edges: 409
20200331, node attrs: dict_keys(['_graphml_vertex_id', 'inherent_risk', 'propagated_risk'])
20200331, edge attrs: dict_keys(['_graphml_edge_id', 'claims_held', 'claims_held_norm', 'claims_held_norm_reciprocal', 'claims_held_reciprocal'])
gt time: 20200630, nodes: 22, edges: 408
20200630, node attrs: dict_keys(['_graphml_vertex_id', 'inherent_risk', 'propagated_risk'])
20200630, edge attrs: dict_keys(['_graphml_edge_id', 'claims_held', 'claims_held_norm', 'claims_held_norm_reciprocal', 'claims_held_reciprocal'])
gt time: 20200930, nodes: 22, edges: 406
20200930, node attrs: dict_keys(['_graphml_vertex_id', 'inherent_risk', 'propagated_risk'])
20200930, edge attrs: dict_keys(['_graphml_edge_id', 'claims_held', 'claims_held_norm', 'claims_held_norm_reciprocal', 'claims_held_reciprocal'])
gt time: 20201231, nodes: 22, edges: 408
20201231, node attrs: dict_keys(['_graphml_vertex_id', 'inherent_risk', 'propagated_risk'])
20201231, edge attrs: dict_keys(['_graphml_edge_id', 'claims_held', 'claims_held_norm', 'claims_held_norm_reciprocal', 'claims_held_reciprocal'])
gt time: 20210331, nodes: 22, edges: 405
20210331, node attrs: dict_keys(['_graphml_vertex_id', 'inherent_risk', 'propagated_risk'])
20210331, edge attrs: dict_keys(['_graphml_edge_id', 'claims_held', 'claims_held_norm', 'claims_held_norm_reciprocal', 'claims_held_reciprocal'])
gt time: 20210630, nodes: 22, edges: 407
20210630, node attrs: dict_keys(['_graphml_vertex_id', 'inherent_risk', 'propagated_risk'])
20210630, edge attrs: dict_keys(['_graphml_edge_id', 'claims_held', 'claims_held_norm', 'claims_held_norm_reciprocal', 'claims_held_reciprocal'])
gt time: 20210930, nodes: 22, edges: 407
20210930, node attrs: dict_keys(['_graphml_vertex_id', 'inherent_risk', 'propagated_risk'])
20210930, edge attrs: dict_keys(['_graphml_edge_id', 'claims_held', 'claims_held_norm', 'claims_held_norm_reciprocal', 'claims_held_reciprocal'])
# Work on the first (earliest) enriched snapshot.
G_temp = G_nxs_feature[time_list[0]]
def disparity_filter(G, alpha):
    """Return a read-only view of *G* keeping only significant edges.

    Implements the disparity filter (Serrano et al.): an edge (i, j) is
    kept when its weight is an unexpectedly large share of node i's
    out-strength, i.e. when (1 - w_ij / s_i) ** (k_i - 1) < alpha, where
    s_i is the weighted and k_i the unweighted out-degree of i.
    """
    strength = G.out_degree(weight='claims_held')
    degree = G.out_degree(weight=None)

    def _significant(src, dst):
        weight = G.edges[src, dst]['claims_held']
        p_value = (1 - (weight / strength[src])) ** (degree[src] - 1)
        return p_value < alpha

    return nx.subgraph_view(G, filter_edge=_significant)
# Keep edges significant at alpha = 0.1 and report how many survive.
G_filt = disparity_filter(G_temp, .1)
print('Filtered network:', len(G_filt.edges), 'edges remain out of', len(G_temp.edges))
# Draw the filtered network twice on a circular layout: node colour encodes
# inherent risk (top panel) vs propagated risk (bottom panel); edge colour
# is the log of the raw claim weight.
plt.figure(3, figsize = (10, 20))
plt.subplot(2, 1, 1)
plt.title('Inherent Risk')
nx.draw(
    G_filt,
    pos = nx.circular_layout(G_filt),
    node_size=1500,
    node_color = [wgt for _, wgt in G_filt.nodes.data('inherent_risk')],
    cmap = mpl.cm.coolwarm,
    with_labels = True,
    font_color = 'black',
    font_size = 10,
    edge_color = [np.log(wgt) for _, _, wgt in G_filt.edges.data('claims_held')],
    edge_cmap = mpl.cm.coolwarm,
    arrowsize = 15,
    connectionstyle="arc3,rad=0.1")
plt.subplot(2, 1, 2)
plt.title('Propagated Risk')
nx.draw(
    G_filt,
    pos = nx.circular_layout(G_filt),
    node_size=1500,
    node_color = [wgt for _, wgt in G_filt.nodes.data('propagated_risk')],
    cmap = mpl.cm.coolwarm,
    with_labels = True,
    font_color = 'black',
    font_size = 10,
    edge_color = [np.log(wgt) for _, _, wgt in G_filt.edges.data('claims_held')],
    edge_cmap = mpl.cm.coolwarm,
    arrowsize = 15,
    connectionstyle="arc3,rad=0.1")
plt.show()
Filtered network: 60 edges remain out of 406
# Japan in: inspect Japan's surviving incoming (borrowing) edges.
G_filt.in_edges('Japan',data='claims_held_norm')
InEdgeDataView([('Australia', 'Japan', 0.2630001866103677), ('Canada', 'Japan', 0.1632493233762417), ('France', 'Japan', 0.1583340820653159), ('South_Korea', 'Japan', 0.21206383293224682), ('Netherlands', 'Japan', 0.12801184163062135), ('Sweden', 'Japan', 0.1590112077574935), ('United_States', 'Japan', 0.25687145260347677)])
# Switzerland in: no incoming edges survive the filter (empty view below).
G_filt.in_edges('Switzerland',data='claims_held_norm')
InEdgeDataView([])
# Sweden in: surviving incoming edges for Sweden.
G_filt.in_edges('Sweden',data='claims_held_norm')
InEdgeDataView([('Finland', 'Sweden', 0.6107958070101474)])
# Greece in: surviving incoming edges for Greece.
G_filt.in_edges('Greece',data='claims_held_norm')
InEdgeDataView([('Turkey', 'Greece', 0.15875132337244569)])
# United States out: surviving outgoing (lending) edges.
G_filt.out_edges('United_States',data='claims_held_norm')
OutEdgeDataView([('United_States', 'Canada', 0.1410859036275138), ('United_States', 'United_Kingdom', 0.17598350025983786), ('United_States', 'Japan', 0.25687145260347677)])
# Bar chart of total normalized strength per country; isolates are plotted
# as a second (differently coloured) series.
G_temp = G_nxs_feature[time_list[0]]
# Identify disconnected countries.
isolates = set(nx.isolates(G_temp))
# Calculate total strength.
country_size = sorted(
    list(G_temp.degree(weight = 'claims_held_norm')),
    key = lambda pair: pair[1],
    reverse = True)
plt.figure(figsize = (12, 4))
plt.title('Total Normalized Lending and Borrowing')
xticks = np.arange(len(country_size))
# Two overlaid series: each country appears with height 0 in exactly one
# of them, so connected countries and isolates get distinct colours.
plt.bar(xticks, [0 if name in isolates else size for name, size in country_size])
plt.bar(xticks, [size if name in isolates else 0 for name, size in country_size])
plt.xticks(xticks, [name for name, size in country_size], rotation = 45)
plt.ylabel('Node Strength')
plt.show()
# Re-rank centralities and strengths using the NORMALIZED weights
# (reciprocal as distance, as before) and tabulate the top 10 of each.
score_closeness = nx.closeness_centrality(G_temp, distance='claims_held_norm_reciprocal')
score_betweenness = nx.betweenness_centrality(G_temp, weight='claims_held_norm_reciprocal')
pd.DataFrame({
    'In-strength': top_n(G_temp.in_degree(weight='claims_held_norm'),10),
    'Out-strength': top_n(G_temp.out_degree(weight='claims_held_norm'),10),
    'Total-strength': top_n(G_temp.degree(weight='claims_held_norm'),10),
    'Closeness': top_n(score_closeness.items(),10),
    'Betweenness': top_n(score_betweenness.items(),10)
}).T
| 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | |
|---|---|---|---|---|---|---|---|---|---|---|
| In-strength | United_States | France | United_Kingdom | Germany | Japan | Spain | Netherlands | Italy | Sweden | Switzerland |
| Out-strength | Spain | Australia | Belgium | Switzerland | Germany | France | Finland | United_Kingdom | India | South_Korea |
| Total-strength | United_States | France | United_Kingdom | Germany | Japan | Spain | Netherlands | Italy | Sweden | Switzerland |
| Closeness | United_States | France | United_Kingdom | Germany | Japan | Spain | Canada | Netherlands | Italy | Switzerland |
| Betweenness | United_Kingdom | Germany | France | United_States | Spain | Netherlands | Sweden | Japan | Italy | Greece |
# Repeat the stochastic nested-SBM fit on the feature graph, now using the
# normalized-reciprocal weight as the real-exponential edge covariate.
G_temp = G_gts_feature[time_list[0]]
entropys = []
states = []
for i in range(10):
    state = gt.minimize_nested_blockmodel_dl(G_temp,
        state_args=dict(recs=[G_temp.properties[('e', 'claims_held_norm_reciprocal')]], rec_types=["real-exponential"]))
    entropys.append(state.entropy())
    states.append(state)
pd.DataFrame({'entropys': entropys}).describe()
| entropys | |
|---|---|
| count | 10.000000 |
| mean | 3677.656510 |
| std | 93.886704 |
| min | 3616.473103 |
| 25% | 3616.769599 |
| 50% | 3616.769599 |
| 75% | 3712.106115 |
| max | 3880.131417 |
# Select and draw the lowest-entropy (best) run.
index = np.argmin(entropys)
print(index)
print(entropys[index])
states[index].draw(vertex_text = G_temp.properties[('v', '_graphml_vertex_id')],
                   vertex_size = 10)
8 3616.473102697209
(<VertexPropertyMap object with value type 'vector<double>', for Graph 0x7fc80a1a5820, at 0x7fc80976e250>, <GraphView object, directed, with 28 vertices and 27 edges, edges filtered by (<EdgePropertyMap object with value type 'bool', for Graph 0x7fc80976e340, at 0x7fc8097382e0>, False), vertices filtered by (<VertexPropertyMap object with value type 'bool', for Graph 0x7fc80976e340, at 0x7fc80976cf70>, False), at 0x7fc80976e340>, <VertexPropertyMap object with value type 'vector<double>', for Graph 0x7fc80976e340, at 0x7fc80976cdc0>)
# 2021.03.31 within COVID-19: switch to the latest enriched snapshot to
# contrast with the pre-pandemic analysis above.
G_temp = G_nxs_feature['20210331']
def disparity_filter(G, alpha):
    """Disparity-filter *G*: keep only edges with a significant weight share.

    An edge (u, v) survives when (1 - w/s) ** (k - 1) < alpha, where w is
    its 'claims_held' weight and s, k are u's weighted and unweighted
    out-degrees. Returns a read-only subgraph view.
    """
    out_strength = G.out_degree(weight='claims_held')
    out_count = G.out_degree(weight=None)

    def _keep_edge(u, v):
        share = G.edges[u, v]['claims_held'] / out_strength[u]
        return (1 - share) ** (out_count[u] - 1) < alpha

    return nx.subgraph_view(G, filter_edge=_keep_edge)
G_filt = disparity_filter(G_temp, .1)
print('Filtered network:', len(G_filt.edges), 'edges remain out of', len(G_temp.edges))

def _draw_risk_panel(position, title, risk_attr):
    """Draw one circular-layout panel coloured by the given node risk attribute."""
    plt.subplot(2, 1, position)
    plt.title(title)
    nx.draw(
        G_filt,
        pos = nx.circular_layout(G_filt),
        node_size = 1500,
        node_color = [risk for _, risk in G_filt.nodes.data(risk_attr)],
        cmap = mpl.cm.coolwarm,
        with_labels = True,
        font_color = 'black',
        font_size = 10,
        # log-scale the edge colours: claim sizes span several orders of magnitude
        edge_color = [np.log(claims) for _, _, claims in G_filt.edges.data('claims_held')],
        edge_cmap = mpl.cm.coolwarm,
        arrowsize = 15,
        connectionstyle = "arc3,rad=0.1")

# Two stacked panels: raw (inherent) risk vs PageRank-propagated risk.
plt.figure(3, figsize = (10, 20))
_draw_risk_panel(1, 'Inherent Risk', 'inherent_risk')
_draw_risk_panel(2, 'Propagated Risk', 'propagated_risk')
plt.show()
Filtered network: 64 edges remain out of 405
def _top_countries(attr, limit=15):
    """Country names ranked by a node attribute, highest first."""
    ranked = sorted(G_filt.nodes.data(attr), key=lambda pair: pair[1], reverse=True)
    return [country for country, _ in ranked[:limit]]

pd.DataFrame({
    'inherent_risk': _top_countries('inherent_risk'),
    'propagated': _top_countries('propagated_risk')
}).T
| 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| inherent_risk | Canada | Spain | Japan | Switzerland | Netherlands | France | Greece | Sweden | United_Kingdom | Italy | Germany | Austria | Australia | United_States | Belgium |
| propagated | United_Kingdom | Spain | Greece | Germany | France | Finland | Netherlands | United_States | Ireland | Austria | Italy | Switzerland | Canada | Sweden | Portugal |
# Significant lenders to Spain, with the raw claim sizes.
G_filt.in_edges(['Spain'], data='claims_held')
InEdgeDataView([('Chile', 'Spain', 66.549508), ('United_Kingdom', 'Spain', 498.325), ('Italy', 'Spain', 109.265), ('Portugal', 'Spain', 124.96), ('Turkey', 'Spain', 59.869408)])
# Portugal's surviving exposures, as shares of its total out-lending.
G_filt.out_edges(['Portugal'], data='claims_held_norm')
OutEdgeDataView([('Portugal', 'Spain', 0.7036374304976443), ('Portugal', 'France', 0.14089078223816867)])
# Greece's surviving exposures, as shares of its total out-lending.
G_filt.out_edges(['Greece'], data='claims_held_norm')
OutEdgeDataView([('Greece', 'Germany', 0.22919726293271056), ('Greece', 'United_States', 0.16489579329754392)])
# Compare nested-SBM fits of the pre-COVID (2019-03-31) and
# within-COVID (2021-03-31) snapshots.
G_20190331 = G_gts_feature['20190331']
G_20210331 = G_gts_feature['20210331']

def _fit_nested_sbm(G, runs=10):
    """Run the stochastic nested-SBM minimizer `runs` times on G.

    The reciprocal normalised claims are used as a real-exponential
    edge covariate. Returns (entropies, states) in run order.
    """
    entropies, states = [], []
    for _ in range(runs):
        state = gt.minimize_nested_blockmodel_dl(
            G,
            state_args=dict(
                recs=[G.properties[('e', 'claims_held_norm_reciprocal')]],
                rec_types=["real-exponential"]))
        entropies.append(state.entropy())
        states.append(state)
    return entropies, states

# The two original copy-pasted loops collapsed into one helper.
entropys_20190331, states_20190331 = _fit_nested_sbm(G_20190331)
entropys_20210331, states_20210331 = _fit_nested_sbm(G_20210331)
pd.DataFrame({'entropys_20190331': entropys_20190331, 'entropys_20210331':entropys_20210331}).describe().T
| count | mean | std | min | 25% | 50% | 75% | max | |
|---|---|---|---|---|---|---|---|---|
| entropys_20190331 | 10.0 | 3648.298268 | 40.483176 | 3606.227104 | 3616.253145 | 3636.570287 | 3678.204710 | 3719.649394 |
| entropys_20210331 | 10.0 | 3632.736994 | 117.436178 | 3591.627352 | 3591.627352 | 3591.627352 | 3603.667324 | 3966.256348 |
# Best (lowest-entropy) 2019-03-31 fit, drawn with country-name labels.
index_20190331 = np.argmin(entropys_20190331)
states_20190331[index_20190331].draw(vertex_text = G_20190331.properties[('v', '_graphml_vertex_id')],
vertex_size = 10)
(<VertexPropertyMap object with value type 'vector<double>', for Graph 0x7fc80a221940, at 0x7fc80a391910>, <GraphView object, directed, with 28 vertices and 27 edges, edges filtered by (<EdgePropertyMap object with value type 'bool', for Graph 0x7fc8180cef10, at 0x7fc809259ac0>, False), vertices filtered by (<VertexPropertyMap object with value type 'bool', for Graph 0x7fc8180cef10, at 0x7fc8092599a0>, False), at 0x7fc8180cef10>, <VertexPropertyMap object with value type 'vector<double>', for Graph 0x7fc8180cef10, at 0x7fc809259160>)
# Best (lowest-entropy) 2021-03-31 fit, drawn with country-name labels.
index_20210331 = np.argmin(entropys_20210331)
states_20210331[index_20210331].draw(vertex_text = G_20210331.properties[('v', '_graphml_vertex_id')],
vertex_size = 10)
(<VertexPropertyMap object with value type 'vector<double>', for Graph 0x7fc80a2144f0, at 0x7fc809256d00>, <GraphView object, directed, with 28 vertices and 27 edges, edges filtered by (<EdgePropertyMap object with value type 'bool', for Graph 0x7fc80a412b50, at 0x7fc80925e850>, False), vertices filtered by (<VertexPropertyMap object with value type 'bool', for Graph 0x7fc80a412b50, at 0x7fc80925eaf0>, False), at 0x7fc80a412b50>, <VertexPropertyMap object with value type 'vector<double>', for Graph 0x7fc80a412b50, at 0x7fc80925ea60>)
# Canada and Portugal
# Canada out
# Build each year's table directly from the edge list; appending rows via
# pd.concat onto an empty frame is needless and deprecated in newer pandas.
df_2019 = pd.DataFrame(
    [{'out country': des,
      '20190331 claims_held': attr_dict['claims_held'],
      '20190331 claims_held_norm': attr_dict['claims_held_norm']}
     for src, des, attr_dict in G_nxs_feature['20190331'].edges('Canada', data=True)],
    columns=['out country', '20190331 claims_held', '20190331 claims_held_norm'])
df_2021 = pd.DataFrame(
    [{'out country': des,
      '20210331 claims_held': attr_dict['claims_held'],
      '20210331 claims_held_norm': attr_dict['claims_held_norm']}
     for src, des, attr_dict in G_nxs_feature['20210331'].edges('Canada', data=True)],
    columns=['out country', '20210331 claims_held', '20210331 claims_held_norm'])
# Outer merge keeps partners present in only one year (claims_diff is NaN there).
df_Canada_out = df_2019.merge(df_2021, how='outer', on=['out country'])
df_Canada_out['claims_diff'] = df_Canada_out['20210331 claims_held'] - df_Canada_out['20190331 claims_held']
df_Canada_out[['out country',
               '20190331 claims_held','20210331 claims_held',
               '20190331 claims_held_norm','20210331 claims_held_norm',
               'claims_diff']]
| out country | 20190331 claims_held | 20210331 claims_held | 20190331 claims_held_norm | 20210331 claims_held_norm | claims_diff | |
|---|---|---|---|---|---|---|
| 0 | Austria | 1.326616 | 1.791729 | 0.002989 | 0.003174 | 0.465113 |
| 1 | Australia | 11.312021 | 13.157539 | 0.025485 | 0.023306 | 1.845518 |
| 2 | Belgium | 3.325 | 4.83 | 0.007491 | 0.008555 | 1.505 |
| 3 | Switzerland | 23.986149 | 29.811022 | 0.054038 | 0.052805 | 5.824873 |
| 4 | Chile | 0.059385 | 0.029754 | 0.000134 | 0.000053 | -0.029631 |
| 5 | Germany | 34.167 | 38.868 | 0.076975 | 0.068847 | 4.701 |
| 6 | Spain | 4.715695 | 5.201965 | 0.010624 | 0.009214 | 0.48627 |
| 7 | Finland | 3.066 | 3.532 | 0.006907 | 0.006256 | 0.466 |
| 8 | France | 30.368 | 36.636 | 0.068416 | 0.064894 | 6.268 |
| 9 | United_Kingdom | 99.638 | 127.598 | 0.224474 | 0.226015 | 27.96 |
| 10 | Greece | 0.048 | 0.02 | 0.000108 | 0.000035 | -0.028 |
| 11 | Ireland | 1.625071 | 2.011818 | 0.003661 | 0.003564 | 0.386747 |
| 12 | Italy | 2.479197 | 2.956778 | 0.005585 | 0.005237 | 0.477581 |
| 13 | Japan | 76.2832 | 101.892 | 0.171858 | 0.180482 | 25.6088 |
| 14 | South_Korea | 3.016028 | 4.496539 | 0.006795 | 0.007965 | 1.480511 |
| 15 | Netherlands | 14.591781 | 18.667296 | 0.032874 | 0.033066 | 4.075515 |
| 16 | Portugal | 0.124766 | 0.132552 | 0.000281 | 0.000235 | 0.007786 |
| 17 | Sweden | 2.43288 | 0.792652 | 0.005481 | 0.001404 | -1.640228 |
| 18 | Turkey | 0.014394 | 0.031848 | 0.000032 | 0.000056 | 0.017454 |
| 19 | United_States | 131.295 | 172.097 | 0.295793 | 0.304837 | 40.802 |
# Canada in
# Build each year's table directly from the in-edge list; appending rows via
# pd.concat onto an empty frame is needless and deprecated in newer pandas.
df_2019 = pd.DataFrame(
    [{'in country': src,
      '20190331 claims_held': attr_dict['claims_held'],
      '20190331 claims_held_norm': attr_dict['claims_held_norm']}
     for src, des, attr_dict in G_nxs_feature['20190331'].in_edges('Canada', data=True)],
    columns=['in country', '20190331 claims_held', '20190331 claims_held_norm'])
df_2021 = pd.DataFrame(
    [{'in country': src,
      '20210331 claims_held': attr_dict['claims_held'],
      '20210331 claims_held_norm': attr_dict['claims_held_norm']}
     for src, des, attr_dict in G_nxs_feature['20210331'].in_edges('Canada', data=True)],
    columns=['in country', '20210331 claims_held', '20210331 claims_held_norm'])
# Outer merge keeps lenders present in only one year (claims_diff is NaN there).
df_Canada_in = df_2019.merge(df_2021, how='outer', on=['in country'])
df_Canada_in['claims_diff'] = df_Canada_in['20210331 claims_held'] - df_Canada_in['20190331 claims_held']
df_Canada_in[['in country',
              '20190331 claims_held','20210331 claims_held',
              '20190331 claims_held_norm','20210331 claims_held_norm',
              'claims_diff']]
| in country | 20190331 claims_held | 20210331 claims_held | 20190331 claims_held_norm | 20210331 claims_held_norm | claims_diff | |
|---|---|---|---|---|---|---|
| 0 | Australia | 21.147346 | 22.575065 | 0.048379 | 0.04182 | 1.427719 |
| 1 | Austria | 1.919362 | 2.327722 | 0.010432 | 0.010121 | 0.40836 |
| 2 | Belgium | 2.263015 | 1.546472 | 0.005943 | 0.00343 | -0.716543 |
| 3 | Switzerland | 8.984105 | 15.117394 | 0.024047 | 0.036245 | 6.133289 |
| 4 | Germany | 32.363637 | 25.810481 | 0.023281 | 0.014946 | -6.553156 |
| 5 | Spain | 1.732413 | 2.006919 | 0.004242 | 0.004009 | 0.274506 |
| 6 | Finland | 2.367553 | 2.398883 | 0.017422 | 0.015812 | 0.03133 |
| 7 | France | 25.279107 | 32.708234 | 0.020607 | 0.021706 | 7.429127 |
| 8 | United_Kingdom | 126.916 | 182.298 | 0.05601 | 0.072208 | 55.382 |
| 9 | Ireland | 30.934869 | 39.82974 | 0.07116 | 0.07394 | 8.894871 |
| 10 | Italy | 1.574582 | 0.495262 | 0.002303 | 0.000637 | -1.07932 |
| 11 | Japan | 40.974525 | 48.412514 | 0.039447 | 0.040436 | 7.437989 |
| 12 | South_Korea | 1.209863 | 1.195375 | 0.004339 | 0.003598 | -0.014488 |
| 13 | Netherlands | 8.688485 | 8.95013 | 0.017713 | 0.017262 | 0.261645 |
| 14 | Sweden | 7.335088 | 6.347792 | 0.054644 | 0.043315 | -0.987296 |
| 15 | United_States | 1168.04 | 1459.4 | 0.189107 | 0.202203 | 291.36 |
| 16 | Greece | NaN | 0.000472 | NaN | 0.000027 | NaN |
# Portugal out
# Build each year's table directly from the edge list; appending rows via
# pd.concat onto an empty frame is needless and deprecated in newer pandas.
df_2019 = pd.DataFrame(
    [{'out country': des,
      '20190331 claims_held': attr_dict['claims_held'],
      '20190331 claims_held_norm': attr_dict['claims_held_norm']}
     for src, des, attr_dict in G_nxs_feature['20190331'].edges('Portugal', data=True)],
    columns=['out country', '20190331 claims_held', '20190331 claims_held_norm'])
df_2021 = pd.DataFrame(
    [{'out country': des,
      '20210331 claims_held': attr_dict['claims_held'],
      '20210331 claims_held_norm': attr_dict['claims_held_norm']}
     for src, des, attr_dict in G_nxs_feature['20210331'].edges('Portugal', data=True)],
    columns=['out country', '20210331 claims_held', '20210331 claims_held_norm'])
# Outer merge keeps partners present in only one year (claims_diff is NaN there).
df_Portugal_out = df_2019.merge(df_2021, how='outer', on=['out country'])
df_Portugal_out['claims_diff'] = df_Portugal_out['20210331 claims_held'] - df_Portugal_out['20190331 claims_held']
df_Portugal_out[['out country',
                 '20190331 claims_held','20210331 claims_held',
                 '20190331 claims_held_norm','20210331 claims_held_norm',
                 'claims_diff']]
| out country | 20190331 claims_held | 20210331 claims_held | 20190331 claims_held_norm | 20210331 claims_held_norm | claims_diff | |
|---|---|---|---|---|---|---|
| 0 | Austria | 0.362732 | 0.362664 | 0.002408 | 0.002042 | -0.000068 |
| 1 | Australia | 0.011246 | 0.10485 | 0.000075 | 0.00059 | 0.093604 |
| 2 | Belgium | 0.46 | 0.559 | 0.003053 | 0.003148 | 0.099 |
| 3 | Germany | 9.92 | 8.905 | 0.065847 | 0.050143 | -1.015 |
| 4 | Spain | 105.147 | 124.96 | 0.697946 | 0.703637 | 19.813 |
| 5 | Finland | 0.048 | 0.137 | 0.000319 | 0.000771 | 0.089 |
| 6 | France | 18.722 | 25.021 | 0.124273 | 0.140891 | 6.299 |
| 7 | United_Kingdom | 1.667 | 1.082 | 0.011065 | 0.006093 | -0.585 |
| 8 | Greece | 0.367 | 0.469 | 0.002436 | 0.002641 | 0.102 |
| 9 | Ireland | 0.572673 | 1.257716 | 0.003801 | 0.007082 | 0.685043 |
| 10 | Italy | 5.704083 | 7.472428 | 0.037863 | 0.042077 | 1.768345 |
| 11 | Japan | 1.1256 | 1.4374 | 0.007472 | 0.008094 | 0.3118 |
| 12 | South_Korea | 0.082988 | 0.030305 | 0.000551 | 0.000171 | -0.052683 |
| 13 | Netherlands | 2.255572 | 1.99799 | 0.014972 | 0.01125 | -0.257582 |
| 14 | Sweden | 0.128055 | 0.183319 | 0.00085 | 0.001032 | 0.055264 |
| 15 | Turkey | 0.002215 | 0.00079 | 0.000015 | 0.000004 | -0.001425 |
| 16 | United_States | 4.076 | 3.611 | 0.027056 | 0.020333 | -0.465 |
# Portugal in
# Build each year's table directly from the in-edge list; appending rows via
# pd.concat onto an empty frame is needless and deprecated in newer pandas.
df_2019 = pd.DataFrame(
    [{'in country': src,
      '20190331 claims_held': attr_dict['claims_held'],
      '20190331 claims_held_norm': attr_dict['claims_held_norm']}
     for src, des, attr_dict in G_nxs_feature['20190331'].in_edges('Portugal', data=True)],
    columns=['in country', '20190331 claims_held', '20190331 claims_held_norm'])
df_2021 = pd.DataFrame(
    [{'in country': src,
      '20210331 claims_held': attr_dict['claims_held'],
      '20210331 claims_held_norm': attr_dict['claims_held_norm']}
     for src, des, attr_dict in G_nxs_feature['20210331'].in_edges('Portugal', data=True)],
    columns=['in country', '20210331 claims_held', '20210331 claims_held_norm'])
# Outer merge keeps lenders present in only one year (claims_diff is NaN there).
df_Portugal_in = df_2019.merge(df_2021, how='outer', on=['in country'])
df_Portugal_in['claims_diff'] = df_Portugal_in['20210331 claims_held'] - df_Portugal_in['20190331 claims_held']
df_Portugal_in[['in country',
                '20190331 claims_held','20210331 claims_held',
                '20190331 claims_held_norm','20210331 claims_held_norm',
                'claims_diff']]
| in country | 20190331 claims_held | 20210331 claims_held | 20190331 claims_held_norm | 20210331 claims_held_norm | claims_diff | |
|---|---|---|---|---|---|---|
| 0 | Australia | 0.136822 | 0.078891 | 0.000313 | 0.000146 | -0.057931 |
| 1 | Austria | 0.1844 | 0.303853 | 0.001002 | 0.001321 | 0.119453 |
| 2 | Belgium | 0.553313 | 0.351384 | 0.001453 | 0.000779 | -0.201929 |
| 3 | Canada | 0.124766 | 0.132552 | 0.000281 | 0.000235 | 0.007786 |
| 4 | Switzerland | 1.819297 | 2.225883 | 0.00487 | 0.005337 | 0.406586 |
| 5 | Chile | 0.045116 | 0.059417 | 0.000529 | 0.000644 | 0.014301 |
| 6 | Germany | 2.474712 | 2.717083 | 0.00178 | 0.001573 | 0.242371 |
| 7 | Spain | 18.480084 | 16.967643 | 0.045247 | 0.033891 | -1.512441 |
| 8 | Finland | 0.201811 | 0.315824 | 0.001485 | 0.002082 | 0.114013 |
| 9 | France | 5.711607 | 7.065255 | 0.004656 | 0.004689 | 1.353648 |
| 10 | United_Kingdom | 2.220809 | 2.492629 | 0.00098 | 0.000987 | 0.27182 |
| 11 | Greece | 0.2093 | 0.515956 | 0.00728 | 0.029077 | 0.306656 |
| 12 | Ireland | 1.542329 | 1.845977 | 0.003548 | 0.003427 | 0.303648 |
| 13 | India | 0.07329 | 0.089081 | 0.000285 | 0.000371 | 0.015791 |
| 14 | Italy | 8.611367 | 12.246638 | 0.012593 | 0.015748 | 3.635271 |
| 15 | Japan | 0.141101 | 0.213069 | 0.000136 | 0.000178 | 0.071968 |
| 16 | South_Korea | 0.000943 | 0.031935 | 0.000003 | 0.000096 | 0.030992 |
| 17 | Netherlands | 1.590697 | 2.689452 | 0.003243 | 0.005187 | 1.098755 |
| 18 | Sweden | 0.154093 | 0.205281 | 0.001148 | 0.001401 | 0.051188 |
| 19 | Turkey | 0.233778 | 0.223205 | 0.001549 | 0.001719 | -0.010573 |
| 20 | United_States | 1.863351 | 2.762204 | 0.000302 | 0.000383 | 0.898853 |
# Italy out
# Build each year's table directly from the edge list; appending rows via
# pd.concat onto an empty frame is needless and deprecated in newer pandas.
df_2019 = pd.DataFrame(
    [{'out country': des,
      '20190331 claims_held': attr_dict['claims_held'],
      '20190331 claims_held_norm': attr_dict['claims_held_norm']}
     for src, des, attr_dict in G_nxs_feature['20190331'].edges('Italy', data=True)],
    columns=['out country', '20190331 claims_held', '20190331 claims_held_norm'])
df_2021 = pd.DataFrame(
    [{'out country': des,
      '20210331 claims_held': attr_dict['claims_held'],
      '20210331 claims_held_norm': attr_dict['claims_held_norm']}
     for src, des, attr_dict in G_nxs_feature['20210331'].edges('Italy', data=True)],
    columns=['out country', '20210331 claims_held', '20210331 claims_held_norm'])
# Outer merge keeps partners present in only one year (claims_diff is NaN there).
df_Italy_out = df_2019.merge(df_2021, how='outer', on=['out country'])
df_Italy_out['claims_diff'] = df_Italy_out['20210331 claims_held'] - df_Italy_out['20190331 claims_held']
df_Italy_out[['out country',
              '20190331 claims_held','20210331 claims_held',
              '20190331 claims_held_norm','20210331 claims_held_norm',
              'claims_diff']]
| out country | 20190331 claims_held | 20210331 claims_held | 20190331 claims_held_norm | 20210331 claims_held_norm | claims_diff | |
|---|---|---|---|---|---|---|
| 0 | Austria | 4.919401 | 5.453861 | 0.007194 | 0.007013 | 0.53446 |
| 1 | Australia | 0.227788 | 0.390702 | 0.000333 | 0.000502 | 0.162914 |
| 2 | Belgium | 4.75 | 4.02 | 0.006946 | 0.005169 | -0.73 |
| 3 | Canada | 1.574582 | 0.495262 | 0.002303 | 0.000637 | -1.07932 |
| 4 | Switzerland | 15.549292 | 17.14311 | 0.022739 | 0.022044 | 1.593818 |
| 5 | Chile | 0.006028 | 0.005978 | 0.000009 | 0.000008 | -0.00005 |
| 6 | Germany | 89.776 | 89.87 | 0.131288 | 0.115561 | 0.094 |
| 7 | Spain | 74.76027 | 109.265 | 0.109329 | 0.140501 | 34.50473 |
| 8 | Finland | 0.184 | NaN | 0.000269 | NaN | NaN |
| 9 | France | 327.308 | 372.293 | 0.478652 | 0.47872 | 44.985 |
| 10 | United_Kingdom | 31.182 | 19.494 | 0.0456 | 0.025067 | -11.688 |
| 11 | Greece | 3.513 | 6.933 | 0.005137 | 0.008915 | 3.42 |
| 12 | Ireland | 1.572325 | 0.604465 | 0.002299 | 0.000777 | -0.96786 |
| 13 | Japan | 33.8155 | 52.7713 | 0.049451 | 0.067857 | 18.9558 |
| 14 | South_Korea | 0.237425 | 0.226043 | 0.000347 | 0.000291 | -0.011382 |
| 15 | Netherlands | 27.997413 | 25.653616 | 0.040943 | 0.032987 | -2.343797 |
| 16 | Portugal | 8.611367 | 12.246638 | 0.012593 | 0.015748 | 3.635271 |
| 17 | Sweden | 0.130025 | 0.070025 | 0.00019 | 0.00009 | -0.06 |
| 18 | Turkey | 0.23257 | 0.129022 | 0.00034 | 0.000166 | -0.103548 |
| 19 | United_States | 57.465 | 60.619 | 0.084036 | 0.077948 | 3.154 |
# Italy in
# Build each year's table directly from the in-edge list; appending rows via
# pd.concat onto an empty frame is needless and deprecated in newer pandas.
df_2019 = pd.DataFrame(
    [{'in country': src,
      '20190331 claims_held': attr_dict['claims_held'],
      '20190331 claims_held_norm': attr_dict['claims_held_norm']}
     for src, des, attr_dict in G_nxs_feature['20190331'].in_edges('Italy', data=True)],
    columns=['in country', '20190331 claims_held', '20190331 claims_held_norm'])
df_2021 = pd.DataFrame(
    [{'in country': src,
      '20210331 claims_held': attr_dict['claims_held'],
      '20210331 claims_held_norm': attr_dict['claims_held_norm']}
     for src, des, attr_dict in G_nxs_feature['20210331'].in_edges('Italy', data=True)],
    columns=['in country', '20210331 claims_held', '20210331 claims_held_norm'])
# Outer merge keeps lenders present in only one year (claims_diff is NaN there).
df_Italy_in = df_2019.merge(df_2021, how='outer', on=['in country'])
df_Italy_in['claims_diff'] = df_Italy_in['20210331 claims_held'] - df_Italy_in['20190331 claims_held']
df_Italy_in[['in country',
             '20190331 claims_held','20210331 claims_held',
             '20190331 claims_held_norm','20210331 claims_held_norm',
             'claims_diff']]
| in country | 20190331 claims_held | 20210331 claims_held | 20190331 claims_held_norm | 20210331 claims_held_norm | claims_diff | |
|---|---|---|---|---|---|---|
| 0 | Australia | 1.147752 | 2.397472 | 0.002626 | 0.004441 | 1.24972 |
| 1 | Austria | 80.254893 | 106.797 | 0.436211 | 0.464374 | 26.542107 |
| 2 | Belgium | 8.917215 | 12.505413 | 0.023419 | 0.027738 | 3.588198 |
| 3 | Canada | 2.479197 | 2.956778 | 0.005585 | 0.005237 | 0.477581 |
| 4 | Switzerland | 15.239844 | 18.580399 | 0.040792 | 0.044548 | 3.340555 |
| 5 | Chile | 0.349842 | NaN | 0.004099 | NaN | NaN |
| 6 | Germany | 167.188 | 230.738 | 0.120266 | 0.133614 | 63.55 |
| 7 | Spain | 62.303733 | 86.446939 | 0.152547 | 0.172667 | 24.143206 |
| 8 | Finland | 1.936336 | 2.85662 | 0.014249 | 0.018829 | 0.920284 |
| 9 | France | 63.310806 | 68.394058 | 0.051609 | 0.045388 | 5.083252 |
| 10 | United_Kingdom | 46.445994 | 40.219739 | 0.020497 | 0.015931 | -6.226255 |
| 11 | Greece | 2.53233 | 1.371391 | 0.088077 | 0.077285 | -1.160939 |
| 12 | Ireland | 11.116013 | 18.954244 | 0.02557 | 0.035187 | 7.838231 |
| 13 | India | 1.462469 | 1.51807 | 0.005688 | 0.006317 | 0.055601 |
| 14 | Japan | 14.821657 | 19.659433 | 0.014269 | 0.01642 | 4.837776 |
| 15 | South_Korea | 0.326356 | 2.085503 | 0.00117 | 0.006277 | 1.759147 |
| 16 | Netherlands | 18.615587 | 26.788284 | 0.037951 | 0.051666 | 8.172697 |
| 17 | Portugal | 5.704083 | 7.472428 | 0.037863 | 0.042077 | 1.768345 |
| 18 | Sweden | 1.618274 | 2.01797 | 0.012056 | 0.01377 | 0.399696 |
| 19 | Turkey | 12.171312 | 6.355708 | 0.08065 | 0.048935 | -5.815604 |
| 20 | United_States | 52.251121 | 54.521675 | 0.00846 | 0.007554 | 2.270554 |
# see the risk distribution
G_temp = G_nxs_feature['20190331']

def disparity_filter(G, alpha):
    """Return an edge-filtered view of G (Serrano et al. disparity filter).

    An out-edge (i, j) survives when its weight is statistically significant
    at level `alpha` against a null model that spreads node i's total
    out-strength uniformly over its k out-edges.
    """
    strength = G.out_degree(weight = 'claims_held')
    degree = G.out_degree(weight = None)

    def _keep(i, j):
        weight = G.edges[i, j]['claims_held']
        # p-value of the edge weight under the uniform null model.
        p_value = (1 - (weight / strength[i])) ** (degree[i] - 1)
        return p_value < alpha

    return nx.subgraph_view(G, filter_edge = _keep)
G_filt = disparity_filter(G_temp, .1)
print('Filtered network:', len(G_filt.edges), 'edges remain out of', len(G_temp.edges))

def _draw_risk_panel(position, title, risk_attr):
    """Draw one circular-layout panel coloured by the given node risk attribute."""
    plt.subplot(2, 1, position)
    plt.title(title)
    nx.draw(
        G_filt,
        pos = nx.circular_layout(G_filt),
        node_size = 1500,
        node_color = [risk for _, risk in G_filt.nodes.data(risk_attr)],
        cmap = mpl.cm.coolwarm,
        with_labels = True,
        font_color = 'black',
        font_size = 10,
        # log-scale the edge colours: claim sizes span several orders of magnitude
        edge_color = [np.log(claims) for _, _, claims in G_filt.edges.data('claims_held')],
        edge_cmap = mpl.cm.coolwarm,
        arrowsize = 15,
        connectionstyle = "arc3,rad=0.1")

# Two stacked panels: raw (inherent) risk vs PageRank-propagated risk.
plt.figure(3, figsize = (10, 20))
_draw_risk_panel(1, 'Inherent Risk', 'inherent_risk')
_draw_risk_panel(2, 'Propagated Risk', 'propagated_risk')
plt.show()
Filtered network: 61 edges remain out of 407
def _top_countries(attr, limit=15):
    """Country names ranked by a node attribute, highest first."""
    ranked = sorted(G_filt.nodes.data(attr), key=lambda pair: pair[1], reverse=True)
    return [country for country, _ in ranked[:limit]]

pd.DataFrame({
    'inherent_risk': _top_countries('inherent_risk'),
    'propagated': _top_countries('propagated_risk')
}).T
| 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| inherent_risk | Canada | Spain | Japan | Switzerland | France | Netherlands | Sweden | Germany | United_Kingdom | Italy | Greece | Austria | Australia | Finland | United_States |
| propagated | United_Kingdom | Germany | Spain | Finland | France | United_States | Austria | Netherlands | Switzerland | Ireland | Canada | Italy | Sweden | Greece | Australia |
# Print the stored risk attributes for three countries in both snapshots
# (same six lines, same order, as the original six explicit print calls).
for country in ('Canada', 'Portugal', 'United_States'):
    for snapshot in ('20190331', '20210331'):
        print(G_nxs_feature[snapshot].nodes[country])
{'inherent_risk': 3.3381755613391912, 'propagated_risk': 0.04460473905587458}
{'inherent_risk': 3.279436230223105, 'propagated_risk': 0.04292556774093095}
{'inherent_risk': 0.3078149345402036, 'propagated_risk': 0.03457135581398739}
{'inherent_risk': 0.30144023477885445, 'propagated_risk': 0.03807989137416631}
{'inherent_risk': 0.34856712915498195, 'propagated_risk': 0.055509450386075215}
{'inherent_risk': 0.3604048114384204, 'propagated_risk': 0.05387045039652726}
# Delete the five highest-inherent-risk countries from a copy of the
# 2021-03-31 network and report the remaining size.
G_origin = G_nxs_feature['20210331']
G_remove = G_origin.copy()
for hub in ('Japan', 'Canada', 'Spain', 'Switzerland', 'Netherlands'):
    G_remove.remove_node(hub)
print("nodes: ", nx.number_of_nodes(G_remove))
print("edges: ", nx.number_of_edges(G_remove))
nodes: 17 edges: 241
# recalculate: lose out-strength, with old in-strength
print("***process")
# In-strength comes from the ORIGINAL graph (claims owed before deletion),
# while out-strength is recomputed on the reduced graph.
in_strength = G_origin.in_degree(weight = 'claims_held')
out_strength = G_remove.out_degree(weight = 'claims_held')

# add norm: each edge weight as a share of its lender's remaining out-strength
for lender, borrower, claims in G_remove.edges.data('claims_held'):
    share = claims / out_strength[lender]
    G_remove.edges[lender, borrower]['claims_held_norm'] = share
    G_remove.edges[lender, borrower]['claims_held_norm_reciprocal'] = 1 / share

# add inherent risk: old money owed to the node over its reduced lending
for node in list(G_remove.nodes(data=False)):
    G_remove.nodes[node]['inherent_risk'] = in_strength[node] / out_strength[node]

# add propagated risk
# The borrower has higher risk, so run PageRank on the reversed graph.
propagated_risk = nx.pagerank(G = G_remove.reverse(),
                              nstart = dict(G_remove.nodes.data('inherent_risk')),
                              weight = 'claims_held_norm')
for node in list(G_remove.nodes(data=False)):
    G_remove.nodes[node]['propagated_risk'] = propagated_risk[node]

# Persist for the graph-tool SBM analysis below.
nx.write_graphml_lxml(G_remove, "20210331_remove.graphml")
print("***finish")
***process ***finish
def disparity_filter(G, alpha):
    """Return an edge-filtered view of G (Serrano et al. disparity filter).

    An out-edge (i, j) survives when its weight is statistically significant
    at level `alpha` against a null model that spreads node i's total
    out-strength uniformly over its k out-edges.
    """
    strength = G.out_degree(weight = 'claims_held')
    degree = G.out_degree(weight = None)

    def _keep(i, j):
        weight = G.edges[i, j]['claims_held']
        # p-value of the edge weight under the uniform null model.
        p_value = (1 - (weight / strength[i])) ** (degree[i] - 1)
        return p_value < alpha

    return nx.subgraph_view(G, filter_edge = _keep)
G_filt = disparity_filter(G_remove, .1)
print('Filtered network:', len(G_filt.edges), 'edges remain out of', len(G_remove.edges))

def _draw_risk_panel(position, title, risk_attr):
    """Draw one circular-layout panel coloured by the given node risk attribute."""
    plt.subplot(2, 1, position)
    plt.title(title)
    nx.draw(
        G_filt,
        pos = nx.circular_layout(G_filt),
        node_size = 1500,
        node_color = [risk for _, risk in G_filt.nodes.data(risk_attr)],
        cmap = mpl.cm.coolwarm,
        with_labels = True,
        font_color = 'black',
        font_size = 10,
        # log-scale the edge colours: claim sizes span several orders of magnitude
        edge_color = [np.log(claims) for _, _, claims in G_filt.edges.data('claims_held')],
        edge_cmap = mpl.cm.coolwarm,
        arrowsize = 15,
        connectionstyle = "arc3,rad=0.1")

# Two stacked panels: raw (inherent) risk vs PageRank-propagated risk.
plt.figure(3, figsize = (10, 20))
_draw_risk_panel(1, 'Inherent Risk', 'inherent_risk')
_draw_risk_panel(2, 'Propagated Risk', 'propagated_risk')
plt.show()
Filtered network: 39 edges remain out of 241
def _ranked_pairs(graph, attr, limit=10):
    """Top `limit` (country, value) pairs for a node attribute, descending.

    Also avoids the original's shadowing of the builtin `id`.
    """
    return sorted(graph.nodes.data(attr), key=lambda pair: pair[1], reverse=True)[:limit]

pd.DataFrame({
    'inherent_risk_before_deletion': _ranked_pairs(G_origin, 'inherent_risk'),
    'inherent_risk_after_deletion': _ranked_pairs(G_remove, 'inherent_risk'),
    'propagated_risk_before_deletion': _ranked_pairs(G_origin, 'propagated_risk'),
    'propagated_risk_after_deletion': _ranked_pairs(G_remove, 'propagated_risk')
})
| inherent_risk_before_deletion | inherent_risk_after_deletion | propagated_risk_before_deletion | propagated_risk_after_deletion | |
|---|---|---|---|---|
| 0 | (Canada, 3.279436230223105) | (France, 2.819779165253216) | (United_Kingdom, 0.0692381146915677) | (United_Kingdom, 0.09543567721705071) |
| 1 | (Spain, 2.8982666368586854) | (Sweden, 2.220080819270625) | (Spain, 0.06850635252576369) | (Greece, 0.08241168605425063) |
| 2 | (Japan, 2.871092610464302) | (Greece, 2.082681121519881) | (Greece, 0.05982975073514005) | (Germany, 0.07645097147100623) |
| 3 | (Switzerland, 2.4237244451641877) | (United_Kingdom, 1.8783333937463362) | (Germany, 0.0584028473434621) | (France, 0.07594001298782484) |
| 4 | (Netherlands, 2.006200687678921) | (Italy, 1.2450947577997007) | (France, 0.055099320772883945) | (United_States, 0.07315915235061783) |
| 5 | (France, 1.8393602113378291) | (Germany, 1.1689268482463466) | (Finland, 0.05503877576059898) | (Portugal, 0.06666071637125452) |
| 6 | (Greece, 1.744085786388376) | (Portugal, 1.0881602905207552) | (Netherlands, 0.05492860343159948) | (Italy, 0.06503683206275343) |
| 7 | (Sweden, 1.6880519661675164) | (United_States, 1.081108193987226) | (United_States, 0.05387045039652726) | (Finland, 0.06452570694165613) |
| 8 | (United_Kingdom, 1.012396006676269) | (Australia, 1.0509549081343135) | (Ireland, 0.04995740986990863) | (Ireland, 0.06297460599915577) |
| 9 | (Italy, 0.9163581915535357) | (Austria, 0.7027345048263981) | (Austria, 0.048390898919203604) | (Austria, 0.05891658977795809) |
# Re-fit the nested SBM (ten stochastic runs) on the node-deleted network,
# this time with the raw claims as the real-exponential edge covariate.
G_temp = gt.load_graph("20210331_remove.graphml")
rec_edge = G_temp.properties[('e', 'claims_held')]
states = [
    gt.minimize_nested_blockmodel_dl(
        G_temp,
        state_args=dict(recs=[rec_edge], rec_types=["real-exponential"]))
    for _ in range(10)
]
entropys = [s.entropy() for s in states]
pd.DataFrame({'entropys': entropys}).describe()
| entropys | |
|---|---|
| count | 10.000000 |
| mean | 1068.025150 |
| std | 37.999594 |
| min | 1050.155279 |
| 25% | 1050.155279 |
| 50% | 1052.570679 |
| 75% | 1066.777471 |
| max | 1172.854414 |
# Select the lowest-entropy run for the node-deleted network and draw it,
# labelling vertices by country name.
index = np.argmin(entropys)
print(index)
print(entropys[index])
states[index].draw(vertex_text = G_temp.properties[('v', '_graphml_vertex_id')],
vertex_size = 10)
1 1050.1552793425947
(<VertexPropertyMap object with value type 'vector<double>', for Graph 0x7fc8090e59a0, at 0x7fc8091f8eb0>, <GraphView object, directed, with 23 vertices and 22 edges, edges filtered by (<EdgePropertyMap object with value type 'bool', for Graph 0x7fc8091f82e0, at 0x7fc809260be0>, False), vertices filtered by (<VertexPropertyMap object with value type 'bool', for Graph 0x7fc8091f82e0, at 0x7fc809260f40>, False), at 0x7fc8091f82e0>, <VertexPropertyMap object with value type 'vector<double>', for Graph 0x7fc8091f82e0, at 0x7fc809260eb0>)
# Portugal's significant out-exposures (normalised shares) in the reduced network.
G_filt.out_edges('Portugal', data = 'claims_held_norm')
OutEdgeDataView([('Portugal', 'Germany', 0.18101038635767505), ('Portugal', 'France', 0.5085975156715764)])
# Ireland's significant out-exposures (normalised shares) in the reduced network.
G_filt.out_edges('Ireland', data = 'claims_held_norm')
OutEdgeDataView([('Ireland', 'France', 0.1834157925779371), ('Ireland', 'United_Kingdom', 0.24105556326413083), ('Ireland', 'United_States', 0.3195601801260412)])